setwd("C:/Users/horat/Desktop/CSIROIntership/soilCode")
The working directory was changed to C:/Users/horat/Desktop/CSIROIntership/soilCode inside a notebook chunk. The working directory will be reset when the chunk is finished running. Use the knitr root.dir option in the setup chunk to change the working directory for notebook chunks.
library(dplyr)
Registered S3 method overwritten by 'dplyr':
  method           from
  print.rowwise_df     

Attaching package: 愼㸱愼㹥dplyr愼㸱愼㹦

The following objects are masked from 愼㸱愼㹥package:stats愼㸱愼㹦:

    filter, lag

The following objects are masked from 愼㸱愼㹥package:base愼㸱愼㹦:

    intersect, setdiff, setequal, union
#create pivot table 
library(reshape)

Attaching package: 愼㸱愼㹥reshape愼㸱愼㹦

The following object is masked from 愼㸱愼㹥package:dplyr愼㸱愼㹦:

    rename
library(data.table)
data.table 1.12.8 using 4 threads (see ?getDTthreads).  Latest news: r-datatable.com

Attaching package: 愼㸱愼㹥data.table愼㸱愼㹦

The following object is masked from 愼㸱愼㹥package:reshape愼㸱愼㹦:

    melt

The following objects are masked from 愼㸱愼㹥package:dplyr愼㸱愼㹦:

    between, first, last
#data partition seperate trainset and testset
library (caTools)

library(caret)
Loading required package: lattice
Loading required package: ggplot2
#svm library due to limitation of iterations change the library
library(e1071)
library(LiblineaR)

#random forest
library(randomForest)
randomForest 4.6-14
Type rfNews() to see new features/changes/bug fixes.

Attaching package: 愼㸱愼㹥randomForest愼㸱愼㹦

The following object is masked from 愼㸱愼㹥package:ggplot2愼㸱愼㹦:

    margin

The following object is masked from 愼㸱愼㹥package:dplyr愼㸱愼㹦:

    combine
#ID4 Decision Tree classifier(CART)
library(rpart)
library(rpart.plot)
library(rattle)
Rattle: A free graphical interface for data science with R.
XXXX 5.3.0 Copyright (c) 2006-2018 Togaware Pty Ltd.
戼㹣昼㹣挼㸸攼㹢'rattle()'ȥ挼㸷攼㸱ҡ愼㸱愼㸲戼㹢ζ愼㹦愼㸱愼㸲戼㸷愼㹤戼㸹昼㸶挼㸴攼㸳戼㸵挼㸴挼㹡昼㹤戼㹥ݡ愼㸳

Attaching package: 愼㸱愼㹥rattle愼㸱愼㹦

The following object is masked from 愼㸱愼㹥package:randomForest愼㸱愼㹦:

    importance
#xgboost
library(xgboost)

Attaching package: 愼㸱愼㹥xgboost愼㸱愼㹦

The following object is masked from 愼㸱愼㹥package:rattle愼㸱愼㹦:

    xgboost

The following object is masked from 愼㸱愼㹥package:dplyr愼㸱愼㹦:

    slice
#for knn classification
library(class)

Attaching package: 愼㸱愼㹥class愼㸱愼㹦

The following object is masked from 愼㸱愼㹥package:reshape愼㸱愼㹦:

    condense
#install neuralnetwork
library(neuralnet)

Attaching package: 愼㸱愼㹥neuralnet愼㸱愼㹦

The following object is masked from 愼㸱愼㹥package:dplyr愼㸱愼㹦:

    compute
#adabag library
library(adabag)
Loading required package: foreach
Loading required package: doParallel
Loading required package: iterators
Loading required package: parallel
#Stochastic Gradient Descent (SGD) Method Learning Function
library(gradDescent)

Attaching package: 愼㸱愼㹥gradDescent愼㸱愼㹦

The following object is masked from 愼㸱愼㹥package:neuralnet愼㸱愼㹦:

    prediction

The following object is masked from 愼㸱愼㹥package:caret愼㸱愼㹦:

    RMSE
library(lightgbm)
Loading required package: R6

Attaching package: 愼㸱愼㹥lightgbm愼㸱愼㹦

The following objects are masked from 愼㸱愼㹥package:xgboost愼㸱愼㹦:

    getinfo, setinfo, slice

The following object is masked from 愼㸱愼㹥package:dplyr愼㸱愼㹦:

    slice
#https://www.kaggle.com/c/amazon-employee-access-challenge/discussion/5128#38925

#matrix library
library(Matrix)

Attaching package: 愼㸱愼㹥Matrix愼㸱愼㹦

The following object is masked from 愼㸱愼㹥package:reshape愼㸱愼㹦:

    expand
#catboost
library(catboost)

#fast naive bayes
library("fastNaiveBayes")

#tidyverse for easy data manipulation and visualization
#caret for easy machine learning workflow

#mlp
library(RSNNS)
Loading required package: Rcpp

Attaching package: 愼㸱愼㹥RSNNS愼㸱愼㹦

The following objects are masked from 愼㸱愼㹥package:caret愼㸱愼㹦:

    confusionMatrix, train
library(tidyverse)
Registered S3 methods overwritten by 'dbplyr':
  method         from
  print.tbl_lazy     
  print.tbl_sql      
-- Attaching packages --------------------------------------- tidyverse 1.3.0 --
愼㸱挼㹣 tibble  3.0.0     愼㸱挼㹣 purrr   0.3.4
愼㸱挼㹣 tidyr   1.0.2     愼㸱挼㹣 stringr 1.4.0
愼㸱挼㹣 readr   1.3.1     愼㸱挼㹣 forcats 0.5.0
-- Conflicts ------------------------------------------ tidyverse_conflicts() --
x purrr::accumulate()     masks foreach::accumulate()
x data.table::between()   masks dplyr::between()
x randomForest::combine() masks dplyr::combine()
x neuralnet::compute()    masks dplyr::compute()
x tidyr::expand()         masks Matrix::expand(), reshape::expand()
x dplyr::filter()         masks stats::filter()
x data.table::first()     masks dplyr::first()
x dplyr::lag()            masks stats::lag()
x data.table::last()      masks dplyr::last()
x purrr::lift()           masks caret::lift()
x randomForest::margin()  masks ggplot2::margin()
x tidyr::pack()           masks Matrix::pack()
x reshape::rename()       masks dplyr::rename()
x lightgbm::slice()       masks xgboost::slice(), dplyr::slice()
x purrr::transpose()      masks data.table::transpose()
x tidyr::unpack()         masks Matrix::unpack()
x purrr::when()           masks foreach::when()
library(caret)

featureSoilTable <- read.csv(file = "featureTable.csv",stringsAsFactors=FALSE)

Grouping data in a Pivot Table

print(head(featureSoilTable))

create the normalize function

normalize <-function(y) {
  
  x<-y[!is.na(y)]
  
  x<-(x - min(x)) / (max(x) - min(x))
  
  y[!is.na(y)]<-x
  
  return(y)
}

preprocessing of the featuring table

#change the NULL to na
featureSoilTable['h_texture'][featureSoilTable['h_texture'] == "NULL"] <- NA
#add appendix to colname to avoid mis-understand of the title of dataframe
colnames(featureSoilTable) <- paste("Str",colnames(featureSoilTable),sep = "_")

remove invalid value and set NA value to 0

#extract valid and invalid soil sample
validsoilTexture <- featureSoilTable[!is.na(featureSoilTable$Str_h_texture),]
invalidsoilTexture <- featureSoilTable[is.na(featureSoilTable$Str_h_texture),]

# remove columns that only have nas
validsoilTexture <- validsoilTexture[,colSums(is.na(validsoilTexture))<nrow(validsoilTexture)]
#remove rows have less than 4 data
contribution <- as.data.frame(rowsum(rep(1,times = length(validsoilTexture$Str_h_texture)), validsoilTexture$Str_h_texture),row.names = count)
label <- sort(unique(validsoilTexture$Str_h_texture))
contribution <- cbind(label,contribution)
invaliddata <- contribution[contribution$V1 < 4,]

for (l in invaliddata$label){
  rowlist = which(validsoilTexture$Str_h_texture == l)
  #print(rowlist)
  validsoilTexture <- validsoilTexture[-rowlist,]

}

set x to numeric

validsoilTexture$Str_h_texture <- as.numeric(as.factor(validsoilTexture$Str_h_texture))
validsoilTexture[,-1] <- apply(apply(validsoilTexture[,-1], 2, as.factor), 2, as.numeric)
validsoilTexture[,-1]<- (apply(validsoilTexture[,-1],2,normalize))
validsoilTexture <- as.data.frame(validsoilTexture)
#change null value to 0
validsoilTexture[is.na(validsoilTexture)] = 0

ncol <- ncol(validsoilTexture)

set random seed

set.seed(122)

give the valid sample

split = sample.split(validsoilTexture$Str_h_texture,SplitRatio = 0.7)

train_set = subset(validsoilTexture, split == TRUE)
test_set = subset(validsoilTexture, split == FALSE)

train_set$Str_h_texture = as.numeric(train_set$Str_h_texture)
test_set$Str_h_texture = as.numeric(test_set$Str_h_texture)
summary(train_set)
 Str_h_texture    Str_samp_no       Str_labr_no      Str_X1.40E.02      Str_X1.40E.04       Str_X1.80E.03         Str_X10_BC       
 Min.   : 1.00   Min.   :0.00000   Min.   :0.00000   Min.   :0.00e+00   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000  
  Str_X10A_NR          Str_X10A1           Str_X10B          Str_X10B_NR        Str_X10B1          Str_X10B3          Str_X10D1       
 Min.   :0.0000000   Min.   :0.00e+00   Min.   :0.0000000   Min.   :0.0e+00   Min.   :0.000000   Min.   :0.00e+00   Min.   :0.000000  
   Str_X11A1       Str_X12_HCL_CU Str_X12_HCL_FE    Str_X12_HCL_MN Str_X12_HCL_ZN Str_X12_HF_CU      Str_X12_HF_FE      Str_X12_HF_MN      
 Min.   :0.0e+00   Min.   :0      Min.   :0.00000   Min.   :0      Min.   :0      Min.   :0.000000   Min.   :0.000000   Min.   :0.0000000  
 Str_X12_HF_ZN       Str_X12_NR_CU     Str_X12_NR_FE       Str_X12_NR_MN      Str_X12_NR_ZN       Str_X12_XRF_CU     Str_X12_XRF_FE     
 Min.   :0.0000000   Min.   :0.0e+00   Min.   :0.0000000   Min.   :0.00e+00   Min.   :0.0000000   Min.   :0.000000   Min.   :0.0000000  
 Str_X12_XRF_MN      Str_X12_XRF_ZN       Str_X12A1_CD  Str_X12A1_CO  Str_X12A1_Cu  Str_X12A1_CU        Str_X12A1_Fe  Str_X12A1_FE       Str_X12A1_Mn
 Min.   :0.0000000   Min.   :0.0000000   Min.   :0     Min.   :0     Min.   :0     Min.   :0.0000000   Min.   :0     Min.   :0.000000   Min.   :0    
  Str_X12A1_MN       Str_X12A1_PB  Str_X12A1_Zn  Str_X12A1_ZN        Str_X12B1_CU        Str_X12B1_ZN        Str_X12B2_CD  Str_X12B2_CU  Str_X12B2_PB
 Min.   :0.000000   Min.   :0     Min.   :0     Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0     Min.   :0     Min.   :0    
  Str_X12B2_ZN   Str_X12C1         Str_X12C2          Str_X13_C_FE      Str_X13_NR_AL       Str_X13_NR_FE      Str_X13_NR_MN       Str_X13A1_AL      
 Min.   :0     Min.   :0.00000   Min.   :0.0000000   Min.   :0.00e+00   Min.   :0.0000000   Min.   :0.00e+00   Min.   :0.000000   Min.   :0.0000000  
  Str_X13A1_FE       Str_X13A1_MN        Str_X13A1_SI        Str_X13B1_AL       Str_X13B1_FE      Str_X13C_C_FE  Str_X13C1_AL        Str_X13C1_FE     
 Min.   :0.000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.00e+00   Min.   :0.00e+00   Min.   :0     Min.   :0.0000000   Min.   :0.000000  
 Str_X13C1_FE203      Str_X13C1_MN        Str_X13C1_SI        Str_X14_NR_S        Str_X140   Str_X14B1          Str_X14C1          Str_X14D1_C      
 Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0e+00   Min.   :0   Min.   :0.000000   Min.   :0.0000000   Min.   :0.00e+00  
  Str_X14D2_BC         Str_X14F1          Str_X14H1_CA        Str_X14H1_K         Str_X14H1_MG       Str_X14H1_NA      Str_X15_BASES     
 Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.00e+00   Min.   :0.00e+00   Min.   :0.00e+00  
 Str_X15_HSK_CEC       Str_X15_NR      Str_X15_NR_AL       Str_X15_NR_BSa      Str_X15_NR_BSP     Str_X15_NR_CA      Str_X15_NR_CEC    
 Min.   :0.0000000   Min.   :0.00000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.000000  
 Str_X15_NR_CMR     Str_X15_NR_ESP      Str_X15_NR_H       Str_X15_NR_K      Str_X15_NR_MG      Str_X15_NR_MN      Str_X15_NR_NA      
 Min.   :0.000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.00e+00   Min.   :0.0000000  
  Str_X15A1_CA      Str_X15A1_CEC      Str_X15A1_K        Str_X15A1_MG       Str_X15A1_MN  Str_X15A1_NA        Str_X15A2_CA      Str_X15A2_CEC     
 Min.   :0.000000   Min.   :0.00000   Min.   :0.000000   Min.   :0.000000   Min.   :0     Min.   :0.0000000   Min.   :0.000000   Min.   :0.000000  
  Str_X15A2_K        Str_X15A2_MG      Str_X15A2_NA      Str_X15A3_NA       Str_X15B1_CA      Str_X15B1_CEC  Str_X15B1_K        Str_X15B1_MG      
 Min.   :0.000000   Min.   :0.00000   Min.   :0.00000   Min.   :0.000000   Min.   :0.00e+00   Min.   :0     Min.   :0.00e+00   Min.   :0.0000000  
  Str_X15B1_NA        Str_X15B2_CA       Str_X15B2_CEC        Str_X15B2_K         Str_X15B2_MG        Str_X15B2_NA       Str_X15C1_CA     
 Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.000000   Min.   :0.000000  
 Str_X15C1_CEC       Str_X15C1_K        Str_X15C1_MG      Str_X15C1_NA      Str_X15C1mod_CA Str_X15C1mod_K Str_X15C1mod_MG Str_X15C1mod_NA
 Min.   :0.000000   Min.   :0.000000   Min.   :0.00000   Min.   :0.000000   Min.   :0       Min.   :0      Min.   :0       Min.   :0      
 Str_X15C1modCEC  Str_X15D1_AL       Str_X15D1_CA       Str_X15D1_CEC       Str_X15D1_K         Str_X15D1_MG        Str_X15D1_NA      
 Min.   :0       Min.   :0.000000   Min.   :0.0000000   Min.   :0.000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000  
  Str_X15D2_CA      Str_X15D2_CEC        Str_X15D2_K         Str_X15D2_MG        Str_X15D2_NA        Str_X15D3_CA Str_X15D3_CEC  Str_X15D3_K
 Min.   :0.000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0     Min.   :0     Min.   :0   
  Str_X15D3_MG  Str_X15D3_NA  Str_X15E1_AL       Str_X15E1_CA      Str_X15E1_CEC  Str_X15E1_H         Str_X15E1_K         Str_X15E1_MG     
 Min.   :0     Min.   :0     Min.   :0.000000   Min.   :0.000000   Min.   :0     Min.   :0.0000000   Min.   :0.0000000   Min.   :0.000000  
  Str_X15E1_MN        Str_X15E1_NA       Str_X15E1mod_AL Str_X15E1mod_CA Str_X15E1mod_K Str_X15E1mod_MG Str_X15E1mod_MN Str_X15E1mod_NA
 Min.   :0.0000000   Min.   :0.0000000   Min.   :0       Min.   :0       Min.   :0      Min.   :0       Min.   :0       Min.   :0      
  Str_X15E2_CA       Str_X15E2_K        Str_X15E2_MG       Str_X15E2_NA      Str_X15E2mod_AL Str_X15E2mod_CA Str_X15E2mod_K Str_X15E2mod_MG
 Min.   :0.00e+00   Min.   :0.00e+00   Min.   :0.00e+00   Min.   :0.00e+00   Min.   :0       Min.   :0       Min.   :0      Min.   :0      
 Str_X15E2mod_MN Str_X15E2mod_NA  Str_X15F1_CA      Str_X15F1_CEC       Str_X15F1_K        Str_X15F1_MG       Str_X15F1_NA        Str_X15F2        
 Min.   :0       Min.   :0       Min.   :0.000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.0000000  
  Str_X15F2_AL         Str_X15F3          Str_X15F4           Str_X15G_C        Str_X15G_C_AL1     Str_X15G_C_AL2     Str_X15G_C_H1        Str_X15G_D
 Min.   :0.0000000   Min.   :0.000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.000000   Min.   :0   
   Str_X15G_H         Str_X15G1          Str_X15G1_AL        Str_X15G1_H         Str_X15I2   Str_X15I3           Str_X15I4         Str_X15J_BASES   
 Min.   :0.00e+00   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.000000   Min.   :0   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.00000  
   Str_X15J_C          Str_X15J_H         Str_X15J1        Str_X15J2_MCLW   Str_X15K1   Str_X15L1        Str_X15L1_a       Str_X15M1_CMR
 Min.   :0.0000000   Min.   :0.000000   Min.   :0.000000   Min.   :0      Min.   :0   Min.   :0.00000   Min.   :0.000000   Min.   :0    
 Str_X15M1_K.Mg Str_X15M1AlECEC Str_X15M1CaCEC Str_X15M1CaECEC Str_X15M1KCEC Str_X15M1KECEC Str_X15M1MgCEC Str_X15M1MgECEC   Str_X15N1       
 Min.   :0      Min.   :0       Min.   :0      Min.   :0       Min.   :0     Min.   :0      Min.   :0      Min.   :0       Min.   :0.000000  
  Str_X15N1_a        Str_X15N1_b         Str_X15O1  Str_X17A_HF.      Str_X17A_NR         Str_X17A1         Str_X17A3_CA  Str_X17A3_MG  Str_X17A3_NA
 Min.   :0.000000   Min.   :0.000000   Min.   :0   Min.   :0.0e+00   Min.   :0.000000   Min.   :0.000000   Min.   :0     Min.   :0     Min.   :0    
  Str_X17A3_S  Str_X17D1_CR  Str_X17D1_CU  Str_X17D1_FE  Str_X17D1_MN  Str_X17D1_NI  Str_X17D1_PB  Str_X17D1_ZN   Str_X18_NR         Str_X18_NR_K     
 Min.   :0    Min.   :0     Min.   :0     Min.   :0     Min.   :0     Min.   :0     Min.   :0     Min.   :0     Min.   :0.0000000   Min.   :0.000000  
   Str_X18A1         Str_X18A1_NR        Str_X18A1mod   Str_X18B1          Str_X18B2         Str_X18F1_Al  Str_X18F1_AL       Str_X18F1_As
 Min.   :0.000000   Min.   :0.0000000   Min.   :0     Min.   :0.00e+00   Min.   :0.000000   Min.   :0     Min.   :0.000000   Min.   :0    
  Str_X18F1_AS        Str_X18F1_B         Str_X18F1_Ca  Str_X18F1_CA       Str_X18F1_Cd  Str_X18F1_CD        Str_X18F1_Co  Str_X18F1_CO     
 Min.   :0.0000000   Min.   :0.0000000   Min.   :0     Min.   :0.000000   Min.   :0     Min.   :0.0000000   Min.   :0     Min.   :0.000000  
  Str_X18F1_Cu  Str_X18F1_CU        Str_X18F1_Fe  Str_X18F1_FE       Str_X18F1_K        Str_X18F1_Mg  Str_X18F1_MG        Str_X18F1_Mn
 Min.   :0     Min.   :0.0000000   Min.   :0     Min.   :0.000000   Min.   :0.000000   Min.   :0     Min.   :0.0000000   Min.   :0    
  Str_X18F1_MN      Str_X18F1_Mo  Str_X18F1_MO       Str_X18F1_Na  Str_X18F1_NA        Str_X18F1_Ni  Str_X18F1_NI        Str_X18F1_P      
 Min.   :0.00000   Min.   :0     Min.   :0.000000   Min.   :0     Min.   :0.0000000   Min.   :0     Min.   :0.0000000   Min.   :0.000000  
  Str_X18F1_Pb  Str_X18F1_PB       Str_X18F1_S         Str_X18F1_Se  Str_X18F1_SE       Str_X18F1_Zn  Str_X18F1_ZN       Str_X18I1_CA  Str_X18I1_MG
 Min.   :0     Min.   :0.000000   Min.   :0.0000000   Min.   :0     Min.   :0.000000   Min.   :0     Min.   :0.000000   Min.   :0     Min.   :0    
  Str_X18I1_NA  Str_X18I1_S  Str_X19_COL         Str_X19A1         Str_X19B_NR         Str_X19B1          Str_X19B2   Str_X19F1   Str_X19F1b
 Min.   :0     Min.   :0    Min.   :0.00e+00   Min.   :0.000000   Min.   :0.000000   Min.   :0.000000   Min.   :0   Min.   :0   Min.   :0   
 Str_X2.00E.01     Str_X2.00E.02       Str_X2_LOI          Str_X2A1          Str_X2D1          Str_X2Z1_R1      Str_X2Z1_R2        Str_X2Z2_C     
 Min.   :0.00000   Min.   :0.00000   Min.   :0.000000   Min.   :0.00000   Min.   :0.0000000   Min.   :0.0000   Min.   :0.00000   Min.   :0.00000  
 Str_X2Z2_CLAY       Str_X2Z2_CS       Str_X2Z2_FS        Str_X2Z2_S        Str_X2Z2_Z         Str_X3_C_B         Str_X3_NR        Str_X3A_C_2.5      
 Min.   :0.000000   Min.   :0.00000   Min.   :0.00000   Min.   :0.00000   Min.   :0.000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.0000000  
  Str_X3A_TSS           Str_X3A1          Str_X4_NR        Str_X4A_C_1       Str_X4A_C_2.5         Str_X4A1       Str_X4A1_MCLW   Str_X4B_AL     
 Min.   :0.0000000   Min.   :0.00e+00   Min.   :0.00000   Min.   :0.000000   Min.   :0.000000   Min.   :0.00000   Min.   :0     Min.   :0.0e+00  
 Str_X4B_AL_NR      Str_X4B_C_2.5          Str_X4B1          Str_X4B2          Str_X4B4 Str_X4B5_MCLW  Str_X4C_C_1          Str_X4C1       
 Min.   :0.000000   Min.   :0.0000000   Min.   :0.00000   Min.   :0.00000   Min.   :0   Min.   :0     Min.   :0.000000   Min.   :0.000000  
   Str_X4G_NR          Str_X5_C_B          Str_X5_NR         Str_X5A_C_2.5       Str_X5A_NR           Str_X5A1            Str_X5A2           Str_X5A2b
 Min.   :0.000e+00   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0e+00   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0  
   Str_X6_DC            Str_X6A1          Str_X6A1_UC          Str_X6B1           Str_X6B2          Str_X6B2b    Str_X6B3          Str_X6B3a
 Min.   :0.0000000   Min.   :0.0000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.000000   Min.   :0   Min.   :0.000000   Min.   :0  
   Str_X6B3b Str_X6B4_0_30 Str_X6B4_30_100  Str_X6H1_HOC  Str_X6H1_POC  Str_X6H1_ROC  Str_X6H1_TOC   Str_X6H2a   Str_X6H2b   Str_X6H2c    Str_X6H3
 Min.   :0   Min.   :0     Min.   :0       Min.   :0     Min.   :0     Min.   :0     Min.   :0     Min.   :0   Min.   :0   Min.   :0   Min.   :0  
 Str_X6H3_0_30 Str_X6H3_30_100    Str_X6Z          Str_X7_C_B          Str_X7_NR            Str_X7A1          Str_X7A2          Str_X7A2a        
 Min.   :0     Min.   :0       Min.   :0.00000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.00000   Min.   :0.000000   Min.   :0.0000000  
    Str_X7A5       Str_X7A6b_MCLW    Str_X7B1         Str_X7C_1MKCla Str_X7C_1MKClb Str_X7C_CASO4          Str_X7C1           Str_X7C1a       
 Min.   :0.00000   Min.   :0      Min.   :0.0000000   Min.   :0      Min.   :0      Min.   :0.0000000   Min.   :0.0000000   Min.   :0.000000  
   Str_X7C1b           Str_X7C1d           Str_X7C1e           Str_X7C2b Str_X7C2b_NH4 Str_X7C2b_NO3   Str_X7D1a   Str_X7E1a   Str_X7E1b
 Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0   Min.   :0     Min.   :0     Min.   :0   Min.   :0   Min.   :0  
    Str_X8A1        Str_X9.00E.02        Str_X9_E_NR         Str_X9_NR          Str_X9A_HCL        Str_X9A_HCLP2O5      Str_X9A_HF.      
 Min.   :0.000000   Min.   :0.0000000   Min.   :0.00e+00   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.00e+00  
   Str_X9A_NR         Str_X9A_S14           Str_X9A1            Str_X9A3          Str_X9A3a            Str_X9B    Str_X9B_9C          Str_X9B_NR      
 Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.000000   Min.   :0.0000000   Min.   :0   Min.   :0.0000000   Min.   :0.000000  
    Str_X9B1            Str_X9B2         Str_X9B2_COL        Str_X9BUFF_0       Str_X9BUFF_0.5       Str_X9BUFF_1        Str_X9BUFF_2      
 Min.   :0.0000000   Min.   :0.000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000  
  Str_X9BUFF_4          Str_X9C1    Str_X9C2           Str_X9D2            Str_X9E          Str_X9G_BSES          Str_X9G1            Str_X9G2       
 Min.   :0.0000000   Min.   :0   Min.   :0.000000   Min.   :0.0000000   Min.   :0.000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.00e+00  
   Str_X9H_NR          Str_X9H1           Str_X9I1           Str_X9I2b   Str_X9I2B    Str_X9J2           Str_X9R1           Str_M1a 
 Min.   :0.00e+00   Min.   :0.000000   Min.   :0.0000000   Min.   :0   Min.   :0   Min.   :0.000000   Min.   :0.000000   Min.   :0  
   Str_MIN_EC       Str_MIN_NR_K2O       Str_P10_1m2m      Str_P10_20_100      Str_P10_20_75     Str_P10_20_75a      Str_P10_75_106     Str_P10_C_MCLW
 Min.   :0.000000   Min.   :0.0000000   Min.   :0.000000   Min.   :0.0000000   Min.   :0.00000   Min.   :0.0000000   Min.   :0.000000   Min.   :0     
  Str_P10_CF_C     Str_P10_CF_CS     Str_P10_CF_FS      Str_P10_CF_S       Str_P10_CF_Z      Str_P10_GRAV      Str_P10_gt2m      Str_P10_gt2MI     
 Min.   :0.00000   Min.   :0.00000   Min.   :0.00000   Min.   :0.000000   Min.   :0.00000   Min.   :0.00000   Min.   :0.000000   Min.   :0.000000  
 Str_P10_gt2OM      Str_P10_HYD_C      Str_P10_HYD_CS      Str_P10_HYD_FS     Str_P10_HYD_S Str_P10_HYD_Z       Str_P10_I_C  Str_P10_I_CS
 Min.   :0.00e+00   Min.   :0.000000   Min.   :0.0000000   Min.   :0.000000   Min.   :0     Min.   :0.000000   Min.   :0    Min.   :0    
  Str_P10_I_FS  Str_P10_I_S  Str_P10_I_Z  Str_P10_NR_C     Str_P10_NR_CS     Str_P10_NR_FS      Str_P10_NR_S     Str_P10_NR_Saa     Str_P10_NR_Z    
 Min.   :0     Min.   :0    Min.   :0    Min.   :0.00000   Min.   :0.00000   Min.   :0.00000   Min.   :0.00000   Min.   :0.00000   Min.   :0.00000  
 Str_P10_NR_ZC       Str_P10_PB_C     Str_P10_PB_CS     Str_P10_PB_FS      Str_P10_PB_S       Str_P10_PB_Z     Str_P10_PB1_C       Str_P10_PB1_CS     
 Min.   :0.00e+00   Min.   :0.00000   Min.   :0.00000   Min.   :0.00000   Min.   :0.000000   Min.   :0.00000   Min.   :0.0000000   Min.   :0.0000000  
 Str_P10_PB1_FS     Str_P10_PB1_Z       Str_P10_S_0.20     Str_P10_S_0.48      Str_P10_S_1       Str_P10_S_1000    Str_P10_S_125     
 Min.   :0.000000   Min.   :0.0000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.00000   Min.   :0.000000  
 Str_P10_S_15.6      Str_P10_S_2        Str_P10_S_20      Str_P10_S_2000     Str_P10_S_250       Str_P10_S_3.9      Str_P10_S_31.2     
 Min.   :0.000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.0000000   Min.   :0.000000   Min.   :0.0000000  
 Str_P10_S_500       Str_P10_S_53       Str_P10_S_63       Str_P10_S_7.8      Str_P10_S_MCLW Str_P10_Z_MCLW Str_P10100_200      Str_P10106_150    
 Min.   :0.000000   Min.   :0.000000   Min.   :0.0000000   Min.   :0.000000   Min.   :0      Min.   :0      Min.   :0.0000000   Min.   :0.000000  
 Str_P10150_180     Str_P10180_300     Str_P10200_500      Str_P10200_600      Str_P102002000      Str_P10300_600     Str_P105002000    
 Min.   :0.000000   Min.   :0.000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.000000   Min.   :0.00e+00  
 Str_P106001000     Str_P106002000       Str_P10A1_C        Str_P10A1_CS       Str_P10A1_FS      Str_P10A1_Z        Str_P3A_CLW   Str_P3A_NR      
 Min.   :0.000000   Min.   :0.0000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.00000   Min.   :0.000000   Min.   :0    Min.   :0.000000  
    Str_P3A1       Str_P3A1_C4        Str_P3A1_CLOD        Str_P3A1_e  Str_P3A2_McK       Str_P3A2_McKMP     Str_P3B_GV_01     Str_P3B_GV_03      
 Min.   :0.0000   Min.   :0.0000000   Min.   :0.000000   Min.   :0    Min.   :0.0000000   Min.   :0.000000   Min.   :0.00000   Min.   :0.0000000  
 Str_P3B_GV_15       Str_P3B_NR_005 Str_P3B_NR_01      Str_P3B_NR_15      Str_P3B_VL_01      Str_P3B_VL_15      Str_P3B1GV_15      
 Min.   :0.0000000   Min.   :0      Min.   :0.000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.0000000  
  Str_P3B1VL_1       Str_P3B1VL_15        Str_P3B2GV_1      Str_P3B2GV_15       Str_P3B2GV_5      Str_P3B2VL_03        Str_P3B2VL_1     
 Min.   :0.0000000   Min.   :0.0000000   Min.   :0.00e+00   Min.   :0.00e+00   Min.   :0.00e+00   Min.   :0.0000000   Min.   :0.000000  
 Str_P3B2VL_15       Str_P3B2VL_5      Str_P3B3VLa001     Str_P3B3VLa005      Str_P3B3VLa01       Str_P3B3VLa03      Str_P3B3VLa06     
 Min.   :0.000000   Min.   :0.000000   Min.   :0.00e+00   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.00e+00   Min.   :0.00e+00  
 Str_P3B3VLaSAT      Str_P3B3VLb001     Str_P3B3VLb003     Str_P3B3VLb005     Str_P3B3VLb01      Str_P3B3VLb03     Str_P3B3VLb05     
 Min.   :0.0000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.00000   Min.   :0.000000  
 Str_P3B3VLb06     Str_P3B3VLbSAT     Str_P3B3VLc001     Str_P3B3VLc003      Str_P3B3VLc005     Str_P3B3VLc01      Str_P3B3VLc03     
 Min.   :0.00000   Min.   :0.000000   Min.   :0.000000   Min.   :0.0000000   Min.   :0.000000   Min.   :0.000000   Min.   :0.000000  
 Str_P3B3VLc06       Str_P3B3VLcSAT      Str_P3B3VLd06       Str_P3B3VLd1       Str_P3B3VLd15        Str_P3B3VLd3        Str_P3B3VLd5      
 Min.   :0.0000000   Min.   :0.0000000   Min.   :0.000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000  
 Str_P3B3VLe004      Str_P3B3VLe01       Str_P3B3VLe03       Str_P3B3VLe06       Str_P3B3VLe15        Str_P3B3VLe2        Str_P3B3VLe7      
 Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000  
 Str_P3B4GV_01      Str_P3B4VL_005      Str_P3B5GV_01       Str_P3B6VL_DUL Str_P3B6VL_LL Str_P3B6VL_SAT Str_P4_100DMcK      Str_P4_10DMcK      
 Min.   :0.000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0      Min.   :0     Min.   :0      Min.   :0.0000000   Min.   :0.0000000  
 Str_P4_30_LOV       Str_P4_30DMcK     Str_P4_50_McK       Str_P4_50DMcK         Str_P4_sat Str_P4_sat_FH      Str_P4_sat_For      Str_P4_sat_LOV   
 Min.   :0.0000000   Min.   :0.0e+00   Min.   :0.0000000   Min.   :0.0000000   Min.   :0    Min.   :0.00e+00   Min.   :0.0000000   Min.   :0.00000  
 Str_P4_sat_McK       Str_P5_COLE       Str_P5_LS_MOD         Str_P6_LP         Str_PWS1.2mm        Str_PWS20.63      Str_PWS212.425    
 Min.   :0.0000000   Min.   :0.000000   Min.   :0.0000000   Min.   :0.000000   Min.   :0.0000000   Min.   :0.000000   Min.   :0.000000  
 Str_PWS425.1mm     Str_PWS63.212      Str_TE_MIR_AL2O3 Str_TE_MIR_FE2O3 Str_TE_MIR_SI02  Str_TE_NR_AL       Str_TE_NR_AL2O       Str_TE_NR_CA     
 Min.   :0.000000   Min.   :0.000000   Min.   :0        Min.   :0        Min.   :0       Min.   :0.0000000   Min.   :0.0000000   Min.   :0.00e+00  
 Str_TE_NR_FE20       Str_TE_NR_MG       Str_TE_NR_NA      Str_TE_NR_SI02 Str_TE_NR_TI02     Str_TE_XRF_MG       Str_TE_XRFAL       Str_TE_XRFCA      
 Min.   :0.0000000   Min.   :0.00e+00   Min.   :0.00e+00   Min.   :0      Min.   :0.00e+00   Min.   :0.000000   Min.   :0.00e+00   Min.   :0.0000000  
  Str_TE_XRFNA      Str_TE_XRFSI02      Str_TE_XRFTIO2      Str_XRD_C_Amp  Str_XRD_C_An      Str_XRD_C_Bhm       Str_XRD_C_Bt Str_XRD_C_Cal      
 Min.   :0.00e+00   Min.   :0.0000000   Min.   :0.0000000   Min.   :0     Min.   :0.00e+00   Min.   :0.00e+00   Min.   :0     Min.   :0.0000000  
 Str_XRD_C_Ch2       Str_XRD_C_Chl       Str_XRD_C_Fsp       Str_XRD_C_Gbs       Str_XRD_C_Gth       Str_XRD_C_Hem      Str_XRD_C_Ht0
 Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.0000000   Min.   :0.000000   Min.   :0    
 Str_XRD_C_Ilt       Str_XRD_C_Is     Str_XRD_C_K2O       Str_XRD_C_Ka     Str_XRD_C_Kln        Str_XRD_C_Lp      Str_XRD_C_Mag Str_XRD_C_Mca      
 Min.   :0.000000   Min.   :0.00000   Min.   :0.000000   Min.   :0.00000   Min.   :0.0000000   Min.   :0.00e+00   Min.   :0     Min.   :0.0000000  
 Str_XRD_C_Mgh       Str_XRD_C_Mnt        Str_XRD_C_Ms Str_XRD_C_Plg      Str_XRD_C_Plm  Str_XRD_C_Qz       Str_XRD_C_Rt Str_XRD_C_Sme      
 Min.   :0.0000000   Min.   :0.0000000   Min.   :0     Min.   :0.00e+00   Min.   :0     Min.   :0.000000   Min.   :0     Min.   :0.0000000  
  Str_XRD_C_Tc     Str_XRD_C_Vrm      
 Min.   :0.0e+00   Min.   :0.0000000  
 [ reached getOption("max.print") -- omitted 5 rows ]
# Find the best model with the best cost parameter via 10-fold cross-validations

# the tunning part of svm, which will take lots of time to run

tryTypes=c(0:7)
tryCosts=c(1000,1,0.001)
bestCost=NA
bestAcc=0.6290723
bestType=NA

for(ty in tryTypes){

   for(co in tryCosts){
    acc=LiblineaR(data=train_set[,-1],target=train_set[,c("Str_h_texture")],type=7,cost=co,bias=1,verbose=FALSE)
    cat("Results for C=",co," : ",acc," accuracy.\n",sep="")
    if(acc>bestAcc){
    bestCost=co
    bestAcc=acc
    bestType=ty
    }
  }

}

svm classifier

LIBLINEAR is a linear classifier for data with millions of instances and features. It supports L2-regularized classifiers, L2-loss linear SVM, L1-loss linear SVM, and logistic regression (LR).LiblineaR allows the estimation of predictive linear models for classification and regression, such as L1- or L2-regularized logistic regression, L1- or L2-regularized L2-loss support vector classification, L2-regularized L1-loss support vector classification and multi-class support vector classification. It also supports L2-regularized support vector regression (with L1- or L2-loss). The estimation of the models is particularly fast as compared to other libraries.

svmStarttime <- Sys.time()
svmClassifier <- LiblineaR(data = train_set[,-1],target = train_set[,c("Str_h_texture")],bias=1,cost = 1000)
svmPredictTrain <- predict(svmClassifier,train_set[,-1],proba=TRUE,decisionValues=TRUE)
svmPredictTrainTable <- table(svmPredictTrain$predictions,train_set[,c("Str_h_texture")])
svmEndtime <- Sys.time()
svmTimeTaken <- svmEndtime - svmStarttime
svmPredictTest <- predict(svmClassifier,test_set[,-1],proba=TRUE,decisionValues=TRUE)
svmPredictTestTable <- table(svmPredictTest$predictions,test_set[,c("Str_h_texture")])

function for calculating the score of matirx

sumElementinTable <- function(a,c,r){
  sum = 0
  for (i in c){
    if (i %in% r){
      sum = sum + a[i,i]
    }
  }
  return(sum)
}

calculating the score of svmClassifier


svmTestcol <- colnames(svmPredictTestTable)
svmTestrow <- rownames(svmPredictTestTable)

svmTraincol <- colnames(svmPredictTrainTable)
svmTrainrow <- rownames(svmPredictTrainTable)


svmPredictTestScore <- sumElementinTable(svmPredictTestTable,svmTestcol,svmTestrow)/sum(svmPredictTestTable)
svmPredictTrainScore <- sumElementinTable(svmPredictTrainTable,svmTraincol,svmTrainrow)/sum(svmPredictTrainTable)
# the time of svm is:
cat("the running time of svm is",svmTimeTaken, "seconds")
the running time of svm is 40.66578 seconds
#the score of svm is

cat("The train score of svm algorithm is ",svmPredictTrainScore,'\n')
The train score of svm algorithm is  0.32799 
cat("The test score of svm algorithm is ",svmPredictTestScore)
The test score of svm algorithm is  0.3019961

classification is CART model

cartFit <- rpart(Str_h_texture ~ .,data = train_set,control = rpart.control(cp = 0.0001))

#get cp value
printcp(cartFit)

Regression tree:
rpart(formula = Str_h_texture ~ ., data = train_set, control = rpart.control(cp = 1e-04))

Variables actually used in tree construction:
  [1] Str_MIN_EC     Str_P10_1m2m   Str_P10_20_75  Str_P10_75_106 Str_P10_CF_C   Str_P10_CF_CS  Str_P10_CF_FS  Str_P10_CF_Z   Str_P10_GRAV  
 [10] Str_P10_HYD_FS Str_P10_NR_C   Str_P10_NR_CS  Str_P10_NR_FS  Str_P10_NR_S   Str_P10_NR_Saa Str_P10_NR_Z   Str_P10_PB_C   Str_P10_PB_CS 
 [19] Str_P10_PB_FS  Str_P10_PB_Z   Str_P10_S_15.6 Str_P10106_150 Str_P10150_180 Str_P10180_300 Str_P106001000 Str_P10A1_CS   Str_P10A1_FS  
 [28] Str_P3A_NR     Str_P3A1       Str_P3B_GV_15  Str_P3B_NR_15  Str_P3B1GV_15  Str_P3B3VLb001 Str_P3B3VLb003 Str_P3B3VLb005 Str_P3B3VLbSAT
 [37] Str_P4_sat_LOV Str_P5_COLE    Str_PWS212.425 Str_samp_no    Str_X10B1      Str_X10D1      Str_X12_HCL_FE Str_X12_HF_FE  Str_X12_NR_FE 
 [46] Str_X12_XRF_FE Str_X12A1_CU   Str_X12A1_FE   Str_X12A1_MN   Str_X12A1_ZN   Str_X12C1      Str_X12C2      Str_X13_C_FE   Str_X13_NR_AL 
 [55] Str_X13_NR_MN  Str_X13A1_AL   Str_X13A1_FE   Str_X13C1_AL   Str_X13C1_FE   Str_X15_NR     Str_X15_NR_AL  Str_X15_NR_BSa Str_X15_NR_BSP
 [64] Str_X15_NR_CA  Str_X15_NR_CEC Str_X15_NR_CMR Str_X15_NR_H   Str_X15_NR_K   Str_X15_NR_MG  Str_X15_NR_NA  Str_X15A1_CA   Str_X15A1_K   
 [73] Str_X15A1_MG   Str_X15A1_NA   Str_X15A2_CA   Str_X15A2_CEC  Str_X15A2_K    Str_X15A2_MG   Str_X15C1_CA   Str_X15C1_MG   Str_X15D1_NA  
 [82] Str_X15E1_AL   Str_X15E1_CA   Str_X15E1_K    Str_X15E1_MG   Str_X15E1_NA   Str_X15F1_MG   Str_X15F1_NA   Str_X15F3      Str_X15G_C    
 [91] Str_X15G_C_AL1 Str_X15G_C_AL2 Str_X15G_C_H1  Str_X15G1      Str_X15G1_H    Str_X15J_BASES Str_X15J_C     Str_X15J_H     Str_X15J1     
[100] Str_X15L1      Str_X15L1_a    Str_X15N1      Str_X15N1_b    Str_X17A_NR    Str_X17A1      Str_X18_NR     Str_X18_NR_K   Str_X18A1_NR  
[109] Str_X18F1_AL   Str_X18F1_B    Str_X18F1_CA   Str_X18F1_CU   Str_X18F1_MN   Str_X18F1_NA   Str_X18F1_NI   Str_X19A1      Str_X19B_NR   
[118] Str_X2.00E.01  Str_X2_LOI     Str_X2A1       Str_X2Z1_R1    Str_X2Z2_C     Str_X2Z2_CLAY  Str_X2Z2_CS    Str_X2Z2_FS    Str_X2Z2_S    
[127] Str_X2Z2_Z     Str_X3_C_B     Str_X3_NR      Str_X3A_C_2.5  Str_X3A_TSS    Str_X3A1       Str_X4_NR      Str_X4A_C_2.5  Str_X4A1      
[136] Str_X4B_AL_NR  Str_X4B_C_2.5  Str_X4B1       Str_X4B2       Str_X4C1       Str_X5_NR      Str_X5A1       Str_X5A2       Str_X6A1      
[145] Str_X6A1_UC    Str_X6B2       Str_X6B3       Str_X6Z        Str_X7_NR      Str_X7A1       Str_X7A2       Str_X7A5       Str_X7C1a     
[154] Str_X7C1b      Str_X7C1e      Str_X8A1       Str_X9_NR      Str_X9A_HCL    Str_X9A_NR     Str_X9A1       Str_X9A3       Str_X9B_9C    
[163] Str_X9B_NR     Str_X9B1       Str_X9BUFF_4   Str_X9C2       Str_X9G_BSES   Str_X9H1      

Root node error: 7040046/47099 = 149.47

n= 47099 

            CP nsplit rel error  xerror      xstd
1   0.01055639      0   1.00000 1.00005 0.0057465
2   0.00731024      1   0.98944 0.99031 0.0057947
3   0.00725937      2   0.98213 0.98692 0.0058184
4   0.00525476      3   0.97487 0.97611 0.0058165
5   0.00488331      4   0.96962 0.97144 0.0058483
6   0.00422825      5   0.96474 0.96719 0.0058328
7   0.00336802      7   0.95628 0.96011 0.0058526
8   0.00279386      8   0.95291 0.95659 0.0058604
9   0.00275832      9   0.95012 0.95467 0.0058558
10  0.00237534     10   0.94736 0.95357 0.0058554
11  0.00222392     11   0.94498 0.95042 0.0058583
12  0.00199803     12   0.94276 0.94930 0.0058535
13  0.00156699     13   0.94076 0.94692 0.0058284
14  0.00153254     17   0.93449 0.94416 0.0058518
15  0.00136422     18   0.93296 0.94260 0.0058625
16  0.00128611     19   0.93160 0.94124 0.0058722
17  0.00127887     20   0.93031 0.94105 0.0058758
18  0.00122866     21   0.92903 0.94115 0.0058761
19  0.00113090     24   0.92535 0.93962 0.0058766
20  0.00105562     25   0.92422 0.93801 0.0058831
21  0.00104612     26   0.92316 0.93577 0.0058902
22  0.00103891     27   0.92211 0.93563 0.0058932
23  0.00102448     28   0.92107 0.93527 0.0058903
24  0.00098145     29   0.92005 0.93369 0.0058990
25  0.00097992     30   0.91907 0.93321 0.0059020
26  0.00095101     31   0.91809 0.93306 0.0059028
27  0.00091608     35   0.91428 0.93225 0.0059050
28  0.00091066     36   0.91337 0.93150 0.0059042
29  0.00086168     37   0.91246 0.93036 0.0059026
30  0.00084842     38   0.91160 0.93002 0.0059056
31  0.00082214     39   0.91075 0.92901 0.0059044
32  0.00079626     40   0.90993 0.92788 0.0059012
33  0.00079123     42   0.90833 0.92772 0.0059068
34  0.00077650     43   0.90754 0.92717 0.0059051
35  0.00074354     44   0.90677 0.92676 0.0059114
36  0.00071854     45   0.90602 0.92463 0.0059041
37  0.00070313     48   0.90387 0.92389 0.0059031
38  0.00069515     49   0.90316 0.92336 0.0059010
39  0.00068829     51   0.90177 0.92325 0.0059061
40  0.00068765     53   0.90040 0.92289 0.0059048
41  0.00066500     57   0.89765 0.92156 0.0059007
42  0.00065235     58   0.89698 0.92077 0.0059006
43  0.00061076     59   0.89633 0.91930 0.0059044
44  0.00059984     60   0.89572 0.91762 0.0059039
45  0.00059830     61   0.89512 0.91691 0.0059026
46  0.00059315     62   0.89452 0.91692 0.0059041
47  0.00058856     63   0.89393 0.91678 0.0059039
48  0.00058771     64   0.89334 0.91680 0.0059062
49  0.00058214     65   0.89275 0.91680 0.0059062
50  0.00057935     66   0.89217 0.91663 0.0059091
51  0.00057103     67   0.89159 0.91695 0.0059163
52  0.00056815     68   0.89102 0.91681 0.0059209
53  0.00056678     69   0.89045 0.91678 0.0059229
54  0.00056286     70   0.88988 0.91663 0.0059214
55  0.00053944     71   0.88932 0.91681 0.0059289
56  0.00053652     72   0.88878 0.91662 0.0059322
57  0.00051174     73   0.88824 0.91556 0.0059317
58  0.00050440     74   0.88773 0.91505 0.0059329
59  0.00050140     76   0.88672 0.91501 0.0059330
60  0.00049835     77   0.88622 0.91500 0.0059344
61  0.00049812     78   0.88572 0.91500 0.0059344
62  0.00049244     79   0.88523 0.91509 0.0059362
63  0.00048789     80   0.88473 0.91498 0.0059410
64  0.00047739     81   0.88425 0.91522 0.0059491
65  0.00047291     84   0.88281 0.91456 0.0059492
66  0.00046505     85   0.88234 0.91455 0.0059548
67  0.00046257     86   0.88188 0.91431 0.0059615
68  0.00045308     88   0.88095 0.91382 0.0059651
69  0.00044923     89   0.88050 0.91398 0.0059683
70  0.00043963     91   0.87960 0.91395 0.0059734
71  0.00043842     92   0.87916 0.91432 0.0059800
72  0.00043064     94   0.87828 0.91420 0.0059879
73  0.00042962     96   0.87742 0.91458 0.0060036
74  0.00042790     97   0.87699 0.91467 0.0060052
75  0.00042686     98   0.87656 0.91467 0.0060052
76  0.00042243     99   0.87614 0.91495 0.0060100
77  0.00041790    100   0.87571 0.91523 0.0060162
78  0.00041284    101   0.87530 0.91544 0.0060276
79  0.00041211    102   0.87488 0.91545 0.0060310
80  0.00041042    103   0.87447 0.91540 0.0060313
81  0.00039957    104   0.87406 0.91546 0.0060358
82  0.00039822    105   0.87366 0.91528 0.0060397
83  0.00039635    108   0.87247 0.91526 0.0060425
84  0.00039492    109   0.87207 0.91540 0.0060434
85  0.00039134    110   0.87168 0.91573 0.0060462
86  0.00039017    111   0.87128 0.91548 0.0060459
87  0.00038810    112   0.87089 0.91566 0.0060483
88  0.00038620    113   0.87051 0.91567 0.0060508
89  0.00038609    114   0.87012 0.91575 0.0060509
90  0.00038032    115   0.86973 0.91588 0.0060637
91  0.00037442    116   0.86935 0.91584 0.0060698
92  0.00037052    117   0.86898 0.91557 0.0060719
93  0.00036813    126   0.86560 0.91571 0.0060748
94  0.00036370    127   0.86524 0.91600 0.0060802
95  0.00036352    128   0.86487 0.91576 0.0060851
96  0.00036068    130   0.86414 0.91597 0.0060883
97  0.00035767    132   0.86342 0.91568 0.0060891
98  0.00035750    133   0.86307 0.91564 0.0060889
99  0.00035398    135   0.86235 0.91555 0.0060918
100 0.00035194    138   0.86129 0.91577 0.0060988
101 0.00034990    139   0.86094 0.91559 0.0060995
102 0.00034948    140   0.86059 0.91559 0.0061019
103 0.00034676    141   0.86024 0.91527 0.0061011
104 0.00034622    142   0.85989 0.91519 0.0061034
105 0.00034456    147   0.85816 0.91519 0.0061034
106 0.00034246    151   0.85672 0.91536 0.0061096
107 0.00033662    152   0.85638 0.91556 0.0061171
108 0.00033395    153   0.85604 0.91564 0.0061334
109 0.00033268    154   0.85571 0.91567 0.0061383
110 0.00032887    155   0.85537 0.91537 0.0061363
111 0.00032572    156   0.85504 0.91557 0.0061469
112 0.00032510    158   0.85439 0.91615 0.0061493
113 0.00031947    159   0.85407 0.91655 0.0061584
114 0.00031775    161   0.85343 0.91695 0.0061688
115 0.00031593    162   0.85311 0.91720 0.0061729
116 0.00031328    164   0.85248 0.91698 0.0061741
117 0.00031156    165   0.85217 0.91735 0.0061766
118 0.00031037    166   0.85185 0.91726 0.0061757
119 0.00030824    167   0.85154 0.91756 0.0061783
120 0.00030795    168   0.85124 0.91768 0.0061822
121 0.00030772    169   0.85093 0.91782 0.0061877
122 0.00030690    171   0.85031 0.91794 0.0061885
123 0.00030509    172   0.85000 0.91800 0.0061903
124 0.00030499    173   0.84970 0.91780 0.0061904
125 0.00030336    177   0.84844 0.91831 0.0061949
126 0.00030219    178   0.84814 0.91845 0.0061984
127 0.00030110    180   0.84754 0.91839 0.0061997
128 0.00029637    181   0.84723 0.91852 0.0061982
129 0.00029529    182   0.84694 0.91876 0.0062019
130 0.00028994    183   0.84664 0.91813 0.0061992
131 0.00028817    184   0.84635 0.91839 0.0062084
132 0.00028717    185   0.84606 0.91837 0.0062119
133 0.00028586    186   0.84578 0.91845 0.0062140
134 0.00028383    187   0.84549 0.91877 0.0062201
135 0.00028206    188   0.84521 0.91889 0.0062257
136 0.00028135    189   0.84493 0.91936 0.0062305
137 0.00027903    190   0.84464 0.91931 0.0062316
138 0.00027870    193   0.84381 0.91941 0.0062336
139 0.00027781    194   0.84353 0.91950 0.0062338
140 0.00027734    195   0.84325 0.91954 0.0062346
141 0.00027728    198   0.84242 0.91959 0.0062349
142 0.00027542    204   0.84075 0.91953 0.0062346
143 0.00027396    205   0.84048 0.91941 0.0062400
144 0.00027196    207   0.83993 0.91916 0.0062405
145 0.00027078    208   0.83966 0.91927 0.0062438
146 0.00026925    209   0.83939 0.91911 0.0062464
147 0.00026799    210   0.83912 0.91938 0.0062486
148 0.00026768    211   0.83885 0.91938 0.0062480
149 0.00026690    213   0.83832 0.91930 0.0062456
150 0.00026672    214   0.83805 0.91955 0.0062497
151 0.00026547    215   0.83778 0.91956 0.0062504
152 0.00026466    217   0.83725 0.91946 0.0062521
153 0.00026401    218   0.83699 0.91944 0.0062520
154 0.00026229    219   0.83672 0.91974 0.0062546
155 0.00025761    222   0.83594 0.92056 0.0062678
156 0.00025719    225   0.83516 0.92028 0.0062717
157 0.00025523    226   0.83491 0.92069 0.0062754
158 0.00025383    227   0.83465 0.92088 0.0062780
159 0.00025273    228   0.83440 0.92084 0.0062796
160 0.00025248    229   0.83414 0.92092 0.0062820
161 0.00025215    232   0.83339 0.92110 0.0062838
162 0.00025124    233   0.83313 0.92106 0.0062854
163 0.00025104    234   0.83288 0.92150 0.0062901
164 0.00025047    235   0.83263 0.92149 0.0062901
165 0.00024911    238   0.83188 0.92214 0.0062948
166 0.00024848    239   0.83163 0.92198 0.0062953
167 0.00024746    241   0.83113 0.92165 0.0062926
168 0.00024631    242   0.83089 0.92145 0.0062927
169 0.00024584    243   0.83064 0.92167 0.0062962
170 0.00024321    244   0.83040 0.92208 0.0063006
171 0.00024219    245   0.83015 0.92219 0.0063082
172 0.00024212    246   0.82991 0.92251 0.0063106
173 0.00024193    248   0.82943 0.92280 0.0063128
174 0.00024139    253   0.82810 0.92314 0.0063178
175 0.00024093    254   0.82786 0.92331 0.0063212
176 0.00024060    255   0.82761 0.92329 0.0063215
177 0.00024050    256   0.82737 0.92331 0.0063217
178 0.00024007    263   0.82566 0.92337 0.0063230
179 0.00023873    264   0.82542 0.92324 0.0063234
180 0.00023594    265   0.82518 0.92277 0.0063227
181 0.00023546    268   0.82447 0.92304 0.0063255
182 0.00023471    269   0.82423 0.92319 0.0063268
183 0.00023392    270   0.82400 0.92375 0.0063338
184 0.00023384    271   0.82376 0.92388 0.0063341
185 0.00022993    272   0.82353 0.92477 0.0063461
186 0.00022965    274   0.82307 0.92484 0.0063511
187 0.00022961    278   0.82212 0.92495 0.0063537
188 0.00022844    279   0.82189 0.92514 0.0063584
189 0.00022781    282   0.82121 0.92542 0.0063622
190 0.00022772    283   0.82098 0.92529 0.0063625
191 0.00022729    284   0.82075 0.92535 0.0063623
192 0.00022669    285   0.82053 0.92530 0.0063628
193 0.00022647    290   0.81937 0.92568 0.0063689
194 0.00022560    291   0.81914 0.92592 0.0063720
195 0.00022489    292   0.81891 0.92580 0.0063762
196 0.00022486    296   0.81802 0.92640 0.0063835
197 0.00022186    297   0.81779 0.92740 0.0063932
198 0.00022123    302   0.81654 0.92744 0.0063966
199 0.00021997    303   0.81632 0.92731 0.0063971
200 0.00021967    304   0.81610 0.92759 0.0063993
 [ reached getOption("max.print") -- omitted 314 rows ]

choose the CP with lowest xerror

cartstartTime <- Sys.time()

fit.pruned = prune(cartFit, cp = 0.00021967)

cartPrediction <- predict(fit.pruned, test_set, type = "vector")

cartendTime <- Sys.time()

cartTimeTaken <- cartendTime - cartstartTime

data.frame(test_set,cartPrediction)

cartPrediction = round(cartPrediction,0)
cartTable <- table(test_set$Str_h_texture,cartPrediction)

cartTable
    cartPrediction
        6    7    8    9   10   11   12   13   14   15   16   17   18   19   20   21   22   23   24   25   26   27   28   29   30   31   32   33   34
  1     0    0    0    0    1    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    1    0    0    3    0    0    0    0    0
  2     0    0    0    0    1    4    0    4    0    0    0    0    0    0    1    5    2    0    3    0    1    0    3   25    0    0    3    1    0
  3     0    0    0    1    1    0    5    0    2    1    0    0    1    1    4    8    2    0    4    7    0    4    4   51    3    7    3    0    2
  4    11    2   17   38   20    9    7    0   11   36   39    2   25   35   19   15   16   12   20   47   29   21   38  487   28   40   28   15   21
  5     0    0    0    4    2    0    0    0    7    2   16    0    1    4    0    5    0    0    1    2    1    0    4   75    2    6    7    2    3
  6     0    1    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    1    0    0    0    0    8    0    1    3    1    2
  7     0    0    0    0    1    0    0    1    2    3    7    2    1    1    8    2    2    1    0    3    9    5    5  153    3    6    4    6    2
  8     0    0    0    0    1    1    6   13   12    2    2   10    0    4    1   13   10    0   14    5    5    9   22  265    6   43   11   18   12
  9     0    0    0    0    0    3    2    9    1    4    0    0    0    5    1   17    3    0   14    6   18    0    3   92    4    7    4   11    0
  10    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    1    0    1    0    4    0    0    0    0    0
  11    0    1    1    1   13    0    0    1   11    2   23    0    6   14    1    5    0    2    0    8    7   11   10  139    3   12    5    0    3
  12    0    0    0    8    1    0    0    4    7   16   50    0    5   28    8   11    9    2    9   12   15    9   33  172   16   12    8    7    7
  13    0    0    0    0    0    0    0    0    0    0    3    0    1    0    1    2    0    0    0    1    0    5    0   49    1   16    4    0    0
  14    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0   10    0    2    0    0    1
  15    0    0    0    1    0    0    0    0    0    2    0    0    1    2    1    1    1    0    2    0    2    1    2   38    1    5    8    2    2
  16    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    2    0    0    0    0    0
  17    1    0    0    1    3    0    1    0    0    3    0    0   22   29   38   75   23   31   77  115   96   29   54  453   17  170   36    5    6
  18    0    1    0    0    0    0    0    0    0    0    0    0    0    0    1    1    0    0    0    0    0    0    0    1    0    0    0    0    0
  19    0    0    0    0    0    0    1    0    0    0    0    0    0    0    0    2    8    1    5    7    5    2    2   93    6   15    3    9    5
  20    0    0    0    0    0    0    1    3    2    3    4    0    9    1    4   16    3    1    6    7    0    5    8   36    1    4   17    2    1
  21    0    0    0    0    0    0    1    0    0    2    3    1    5    3    3    6    0    0    2    3    2    8    3   37   10   11   12    1    4
  22    0    0    0    1    0    0    0    0    0    1    0    0    0    0    3   10    1    0    0    0    0    1    4   29    1    4    2    0    0
  23    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0    0   10    0    1    2    0    1
    cartPrediction
       35   36   37   38   39   40   41   42   43   47   48   49   50   52
  1     0    0    0    0    1    0    0    0    0    0    0    0    0    0
  2     0    0    0    0    0    0    0    0    0    0    0    0    0    0
  3     2    1    0    0    0    0    0    0    0    0    0    0    0    0
  4    13    6   12   14    1    2    3    1    0    2    0    0    0    0
  5     2    1    0    1    0    0    0    0    0    0    0    0    0    0
  6     5    2    0    3    0    0    0    0    0    0    0    0    0    0
  7     4    5    0    2    4    0    0    0    0    0    0    0    0    0
  8     5    4    2    9    4    0    1    0    0    0    0    0    0    0
  9     4    2    0    2    1    0    1    0    0    0    0    0    0    0
  10    0    0    0    0    0    0    0    0    0    0    0    0    0    0
  11    2    0    0    0    5    0    0    0    0    0    0    0    0    0
  12    3    4    1    5    0    0    2    0    0    0    0    1    0    0
  13    0    0    2    0    0    0    0    0    0    0    0    0    0    0
  14    0    0    0    0    0    0    0    0    0    0    0    0    0    0
  15    6    1    0    1    0    0    0    0    0    0    0    0    0    0
  16    0    0    0    0    0    0    0    0    0    0    0    0    0    0
  17    7    2    1    6    1    0    1    2    0    1    0    0    0    0
  18    0    0    0    0    0    0    0    0    0    0    0    0    0    0
  19    7    7    0    1    0    0    0    0    0    0    0    0    0    0
  20    1    5    0    1    1    0    0    0    0    0    0    0    0    0
  21    0    1    0    6    3    0    0    0    0    0    0    0    0    0
  22    0    0    0    0    0    0    0    0    0    0    0    0    0    0
  23    1    0    0    0    0    0    0    0    0    0    0    0    0    0
 [ reached getOption("max.print") -- omitted 31 rows ]

calculate the score of cart model

cartrow <- rownames(cartTable)
cartcol <- colnames(cartTable)
cartscore <- sumElementinTable(cartTable,cartrow,cartcol)/sum(cartTable)

the time of cart model

cat("the time of cart",cartTimeTaken , "seconds")
the time of cart 0.320148 seconds

the score of cart model

cat('the score of cart model',cartscore)
the score of cart model 0.02476596

lightgbm

separate x and y from train_set and test_set


train_set.num_X <- select (train_set,-c(Str_h_texture))
test_set.num_X <- select (test_set,-c(Str_h_texture))

start lightgbm machine learning algorithms

lstarttime <- Sys.time()
ltrain = lgb.Dataset(data = as.matrix(train_set.num_X),label = train_set$Str_h_texture, free_raw_data = FALSE)
params <- list(objective="regression", metric="l2")
model <- lgb.cv(params, 
                ltrain , 
                10, 
                nfold=5, 
                min_data=1, 
                learning_rate=1, 
                early_stopping_rounds=10,
                Depth = 8,
                lambda_l1 = 10,
                lambda_l2 = 10
)
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing col-wise multi-threading, the overhead of testing was 0.046899 seconds.
You can set `force_col_wise=true` to remove the overhead.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 37680, number of used features: 451
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing col-wise multi-threading, the overhead of testing was 0.045481 seconds.
You can set `force_col_wise=true` to remove the overhead.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 37679, number of used features: 451
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.033684 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 37679, number of used features: 451
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing col-wise multi-threading, the overhead of testing was 0.029977 seconds.
You can set `force_col_wise=true` to remove the overhead.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 37679, number of used features: 451
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing col-wise multi-threading, the overhead of testing was 0.045433 seconds.
You can set `force_col_wise=true` to remove the overhead.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 37679, number of used features: 451
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Info] Start training from score 29.139066
[LightGBM] [Info] Start training from score 29.136575
[LightGBM] [Info] Start training from score 29.121288
[LightGBM] [Info] Start training from score 29.096473
[LightGBM] [Info] Start training from score 29.103532
[1]:    valid's l2:139.488+2.86001 
[2]:    valid's l2:137.484+2.48886 
[3]:    valid's l2:136.646+2.49735 
[4]:    valid's l2:135.815+2.72855 
[5]:    valid's l2:135.377+2.73764 
[6]:    valid's l2:135.16+2.75332 
[7]:    valid's l2:134.94+2.76135 
[8]:    valid's l2:134.732+2.82976 
[9]:    valid's l2:134.646+2.70722 
[10]:   valid's l2:134.342+2.72809 
lstoptime <- Sys.time()

tunning parameters

num_leaves: This is the main parameter to control the complexity of the tree model. Theoretically, we can set num_leaves = 2^(max_depth) to obtain the same number of leaves as depth-wise tree. However, this simple conversion is not good in practice. The reason is that a leaf-wise tree is typically much deeper than a depth-wise tree for a fixed number of leaves. Unconstrained depth can induce over-fitting. Thus, when trying to tune the num_leaves, we should let it be smaller than 2^(max_depth). For example, when the max_depth=7 the depth-wise tree can get good accuracy, but setting num_leaves to 127 may cause over-fitting, and setting it to 70 or 80 may get better accuracy than depth-wise.

min_data_in_leaf: This is a very important parameter to prevent over-fitting in a leaf-wise tree. Its optimal value depends on the number of training samples and num_leaves. Setting it to a large value can avoid growing too deep a tree, but may cause under-fitting. In practice, setting it to hundreds or thousands is enough for a large dataset.

max_depth: You also can use max_depth to limit the tree depth explicitly.

ltest = lgb.Dataset.create.valid(ltrain , as.matrix(test_set.num_X), label = test_set$Str_h_texture)
valids <- list(test = ltest)

grid_search <- expand.grid(Depth = 7:8,
                           L1 = 8:12,
                           L2 = 8:12)

model <- list()
perf <- numeric(nrow(grid_search))

for (i in 1:nrow(grid_search)) {
  model[[i]] <- lgb.train(list(objective = "regression",
                               metric = "l2",
                               lambda_l1 = grid_search[i, "L1"],
                               lambda_l2 = grid_search[i, "L2"],
                               max_depth = grid_search[i, "Depth"]),
                          ltrain,
                          2,
                          valids,
                          min_data = 1,
                          learning_rate = 1,
                          early_stopping_rounds = 5,
                          num_leaves = 2,
                          num_iterations = 100,
                          min_gain_to_split = 500,)
  
  perf[i] <- min(rbindlist(model[[i]]$record_evals$test$l2))
}
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.040525 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.302 
[2]:    test's l2:147.085 
[3]:    test's l2:145.971 
[4]:    test's l2:145.399 
[5]:    test's l2:144.803 
[6]:    test's l2:144.096 
[7]:    test's l2:143.655 
[8]:    test's l2:143.334 
[9]:    test's l2:143.309 
[10]:   test's l2:143.032 
[11]:   test's l2:142.69 
[12]:   test's l2:142.381 
[13]:   test's l2:142.109 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.731 
[17]:   test's l2:141.41 
[18]:   test's l2:141.305 
[19]:   test's l2:141.189 
[20]:   test's l2:140.947 
[21]:   test's l2:140.851 
[22]:   test's l2:140.723 
[23]:   test's l2:140.544 
[24]:   test's l2:140.392 
[25]:   test's l2:140.275 
[26]:   test's l2:140.16 
[27]:   test's l2:140.108 
[28]:   test's l2:139.963 
[29]:   test's l2:139.883 
[30]:   test's l2:139.684 
[31]:   test's l2:139.527 
[32]:   test's l2:139.458 
[33]:   test's l2:139.465 
[34]:   test's l2:139.358 
[35]:   test's l2:139.331 
[36]:   test's l2:139.258 
[37]:   test's l2:139.133 
[38]:   test's l2:139.152 
[39]:   test's l2:139.175 
[40]:   test's l2:139.04 
[41]:   test's l2:139.026 
[42]:   test's l2:138.988 
[43]:   test's l2:138.883 
[44]:   test's l2:138.839 
[45]:   test's l2:138.788 
[46]:   test's l2:138.749 
[47]:   test's l2:138.755 
[48]:   test's l2:138.624 
[49]:   test's l2:138.583 
[50]:   test's l2:138.583 
[51]:   test's l2:138.557 
[52]:   test's l2:138.537 
[53]:   test's l2:138.529 
[54]:   test's l2:138.457 
[55]:   test's l2:138.449 
[56]:   test's l2:138.366 
[57]:   test's l2:138.264 
[58]:   test's l2:138.244 
[59]:   test's l2:138.217 
[60]:   test's l2:138.126 
[61]:   test's l2:138.063 
[62]:   test's l2:138.062 
[63]:   test's l2:138.031 
[64]:   test's l2:137.99 
[65]:   test's l2:137.93 
[66]:   test's l2:137.852 
[67]:   test's l2:137.758 
[68]:   test's l2:137.726 
[69]:   test's l2:137.723 
[70]:   test's l2:137.619 
[71]:   test's l2:137.611 
[72]:   test's l2:137.545 
[73]:   test's l2:137.496 
[74]:   test's l2:137.526 
[75]:   test's l2:137.488 
[76]:   test's l2:137.439 
[77]:   test's l2:137.403 
[78]:   test's l2:137.425 
[79]:   test's l2:137.324 
[80]:   test's l2:137.322 
[81]:   test's l2:137.287 
[82]:   test's l2:137.331 
[83]:   test's l2:137.34 
[84]:   test's l2:137.363 
[85]:   test's l2:137.294 
[86]:   test's l2:137.284 
[87]:   test's l2:137.278 
[88]:   test's l2:137.211 
[89]:   test's l2:137.233 
[90]:   test's l2:137.135 
[91]:   test's l2:137.131 
[92]:   test's l2:137.145 
[93]:   test's l2:137.154 
[94]:   test's l2:137.099 
[95]:   test's l2:137.027 
[96]:   test's l2:137.047 
[97]:   test's l2:136.998 
[98]:   test's l2:137.011 
[99]:   test's l2:136.952 
[100]:  test's l2:136.973 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.059030 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.302 
[2]:    test's l2:147.085 
[3]:    test's l2:145.971 
[4]:    test's l2:145.399 
[5]:    test's l2:144.803 
[6]:    test's l2:144.096 
[7]:    test's l2:143.655 
[8]:    test's l2:143.334 
[9]:    test's l2:143.309 
[10]:   test's l2:143.032 
[11]:   test's l2:142.69 
[12]:   test's l2:142.381 
[13]:   test's l2:142.109 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.731 
[17]:   test's l2:141.41 
[18]:   test's l2:141.305 
[19]:   test's l2:141.189 
[20]:   test's l2:140.947 
[21]:   test's l2:140.851 
[22]:   test's l2:140.723 
[23]:   test's l2:140.544 
[24]:   test's l2:140.392 
[25]:   test's l2:140.275 
[26]:   test's l2:140.16 
[27]:   test's l2:140.108 
[28]:   test's l2:139.963 
[29]:   test's l2:139.883 
[30]:   test's l2:139.684 
[31]:   test's l2:139.527 
[32]:   test's l2:139.458 
[33]:   test's l2:139.465 
[34]:   test's l2:139.358 
[35]:   test's l2:139.331 
[36]:   test's l2:139.258 
[37]:   test's l2:139.133 
[38]:   test's l2:139.152 
[39]:   test's l2:139.175 
[40]:   test's l2:139.04 
[41]:   test's l2:139.026 
[42]:   test's l2:138.988 
[43]:   test's l2:138.883 
[44]:   test's l2:138.839 
[45]:   test's l2:138.788 
[46]:   test's l2:138.749 
[47]:   test's l2:138.755 
[48]:   test's l2:138.624 
[49]:   test's l2:138.583 
[50]:   test's l2:138.583 
[51]:   test's l2:138.557 
[52]:   test's l2:138.537 
[53]:   test's l2:138.529 
[54]:   test's l2:138.457 
[55]:   test's l2:138.449 
[56]:   test's l2:138.366 
[57]:   test's l2:138.264 
[58]:   test's l2:138.244 
[59]:   test's l2:138.217 
[60]:   test's l2:138.126 
[61]:   test's l2:138.063 
[62]:   test's l2:138.062 
[63]:   test's l2:138.031 
[64]:   test's l2:137.99 
[65]:   test's l2:137.93 
[66]:   test's l2:137.852 
[67]:   test's l2:137.758 
[68]:   test's l2:137.726 
[69]:   test's l2:137.723 
[70]:   test's l2:137.619 
[71]:   test's l2:137.611 
[72]:   test's l2:137.545 
[73]:   test's l2:137.496 
[74]:   test's l2:137.526 
[75]:   test's l2:137.488 
[76]:   test's l2:137.439 
[77]:   test's l2:137.403 
[78]:   test's l2:137.425 
[79]:   test's l2:137.324 
[80]:   test's l2:137.322 
[81]:   test's l2:137.287 
[82]:   test's l2:137.331 
[83]:   test's l2:137.34 
[84]:   test's l2:137.363 
[85]:   test's l2:137.294 
[86]:   test's l2:137.284 
[87]:   test's l2:137.278 
[88]:   test's l2:137.211 
[89]:   test's l2:137.233 
[90]:   test's l2:137.135 
[91]:   test's l2:137.131 
[92]:   test's l2:137.145 
[93]:   test's l2:137.154 
[94]:   test's l2:137.099 
[95]:   test's l2:137.027 
[96]:   test's l2:137.047 
[97]:   test's l2:136.998 
[98]:   test's l2:137.011 
[99]:   test's l2:136.952 
[100]:  test's l2:136.973 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.041312 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.302 
[2]:    test's l2:147.085 
[3]:    test's l2:145.971 
[4]:    test's l2:145.399 
[5]:    test's l2:144.803 
[6]:    test's l2:144.096 
[7]:    test's l2:143.655 
[8]:    test's l2:143.334 
[9]:    test's l2:143.308 
[10]:   test's l2:143.032 
[11]:   test's l2:142.69 
[12]:   test's l2:142.381 
[13]:   test's l2:142.109 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.41 
[18]:   test's l2:141.305 
[19]:   test's l2:141.189 
[20]:   test's l2:140.948 
[21]:   test's l2:140.851 
[22]:   test's l2:140.723 
[23]:   test's l2:140.544 
[24]:   test's l2:140.393 
[25]:   test's l2:140.275 
[26]:   test's l2:140.16 
[27]:   test's l2:140.108 
[28]:   test's l2:139.963 
[29]:   test's l2:139.883 
[30]:   test's l2:139.684 
[31]:   test's l2:139.528 
[32]:   test's l2:139.458 
[33]:   test's l2:139.465 
[34]:   test's l2:139.358 
[35]:   test's l2:139.332 
[36]:   test's l2:139.258 
[37]:   test's l2:139.133 
[38]:   test's l2:139.152 
[39]:   test's l2:139.175 
[40]:   test's l2:139.04 
[41]:   test's l2:139.026 
[42]:   test's l2:138.988 
[43]:   test's l2:138.883 
[44]:   test's l2:138.839 
[45]:   test's l2:138.788 
[46]:   test's l2:138.749 
[47]:   test's l2:138.755 
[48]:   test's l2:138.624 
[49]:   test's l2:138.583 
[50]:   test's l2:138.558 
[51]:   test's l2:138.538 
[52]:   test's l2:138.537 
[53]:   test's l2:138.529 
[54]:   test's l2:138.457 
[55]:   test's l2:138.356 
[56]:   test's l2:138.335 
[57]:   test's l2:138.327 
[58]:   test's l2:138.244 
[59]:   test's l2:138.217 
[60]:   test's l2:138.126 
[61]:   test's l2:138.063 
[62]:   test's l2:138.062 
[63]:   test's l2:138.031 
[64]:   test's l2:137.99 
[65]:   test's l2:137.931 
[66]:   test's l2:137.852 
[67]:   test's l2:137.758 
[68]:   test's l2:137.726 
[69]:   test's l2:137.724 
[70]:   test's l2:137.62 
[71]:   test's l2:137.612 
[72]:   test's l2:137.545 
[73]:   test's l2:137.496 
[74]:   test's l2:137.527 
[75]:   test's l2:137.488 
[76]:   test's l2:137.439 
[77]:   test's l2:137.403 
[78]:   test's l2:137.425 
[79]:   test's l2:137.325 
[80]:   test's l2:137.322 
[81]:   test's l2:137.288 
[82]:   test's l2:137.331 
[83]:   test's l2:137.34 
[84]:   test's l2:137.363 
[85]:   test's l2:137.294 
[86]:   test's l2:137.284 
[87]:   test's l2:137.278 
[88]:   test's l2:137.211 
[89]:   test's l2:137.233 
[90]:   test's l2:137.135 
[91]:   test's l2:137.131 
[92]:   test's l2:137.145 
[93]:   test's l2:137.153 
[94]:   test's l2:137.099 
[95]:   test's l2:137.027 
[96]:   test's l2:137.047 
[97]:   test's l2:136.998 
[98]:   test's l2:137.01 
[99]:   test's l2:136.951 
[100]:  test's l2:136.972 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.057814 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.302 
[2]:    test's l2:147.085 
[3]:    test's l2:145.971 
[4]:    test's l2:145.399 
[5]:    test's l2:144.803 
[6]:    test's l2:144.096 
[7]:    test's l2:143.655 
[8]:    test's l2:143.334 
[9]:    test's l2:143.308 
[10]:   test's l2:143.032 
[11]:   test's l2:142.69 
[12]:   test's l2:142.381 
[13]:   test's l2:142.109 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.41 
[18]:   test's l2:141.305 
[19]:   test's l2:141.189 
[20]:   test's l2:140.948 
[21]:   test's l2:140.851 
[22]:   test's l2:140.723 
[23]:   test's l2:140.544 
[24]:   test's l2:140.393 
[25]:   test's l2:140.275 
[26]:   test's l2:140.16 
[27]:   test's l2:140.108 
[28]:   test's l2:139.963 
[29]:   test's l2:139.883 
[30]:   test's l2:139.684 
[31]:   test's l2:139.528 
[32]:   test's l2:139.458 
[33]:   test's l2:139.465 
[34]:   test's l2:139.358 
[35]:   test's l2:139.332 
[36]:   test's l2:139.258 
[37]:   test's l2:139.133 
[38]:   test's l2:139.152 
[39]:   test's l2:139.175 
[40]:   test's l2:139.04 
[41]:   test's l2:139.026 
[42]:   test's l2:138.988 
[43]:   test's l2:138.883 
[44]:   test's l2:138.839 
[45]:   test's l2:138.788 
[46]:   test's l2:138.749 
[47]:   test's l2:138.755 
[48]:   test's l2:138.624 
[49]:   test's l2:138.583 
[50]:   test's l2:138.558 
[51]:   test's l2:138.538 
[52]:   test's l2:138.537 
[53]:   test's l2:138.529 
[54]:   test's l2:138.457 
[55]:   test's l2:138.356 
[56]:   test's l2:138.335 
[57]:   test's l2:138.327 
[58]:   test's l2:138.244 
[59]:   test's l2:138.217 
[60]:   test's l2:138.126 
[61]:   test's l2:138.063 
[62]:   test's l2:138.062 
[63]:   test's l2:138.031 
[64]:   test's l2:137.99 
[65]:   test's l2:137.931 
[66]:   test's l2:137.852 
[67]:   test's l2:137.758 
[68]:   test's l2:137.726 
[69]:   test's l2:137.724 
[70]:   test's l2:137.62 
[71]:   test's l2:137.612 
[72]:   test's l2:137.545 
[73]:   test's l2:137.496 
[74]:   test's l2:137.527 
[75]:   test's l2:137.488 
[76]:   test's l2:137.439 
[77]:   test's l2:137.403 
[78]:   test's l2:137.425 
[79]:   test's l2:137.325 
[80]:   test's l2:137.322 
[81]:   test's l2:137.288 
[82]:   test's l2:137.331 
[83]:   test's l2:137.34 
[84]:   test's l2:137.363 
[85]:   test's l2:137.294 
[86]:   test's l2:137.284 
[87]:   test's l2:137.278 
[88]:   test's l2:137.211 
[89]:   test's l2:137.233 
[90]:   test's l2:137.135 
[91]:   test's l2:137.131 
[92]:   test's l2:137.145 
[93]:   test's l2:137.153 
[94]:   test's l2:137.099 
[95]:   test's l2:137.027 
[96]:   test's l2:137.047 
[97]:   test's l2:136.998 
[98]:   test's l2:137.01 
[99]:   test's l2:136.951 
[100]:  test's l2:136.972 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.052319 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.302 
[2]:    test's l2:147.085 
[3]:    test's l2:145.971 
[4]:    test's l2:145.399 
[5]:    test's l2:144.803 
[6]:    test's l2:144.096 
[7]:    test's l2:143.655 
[8]:    test's l2:143.334 
[9]:    test's l2:143.308 
[10]:   test's l2:143.032 
[11]:   test's l2:142.689 
[12]:   test's l2:142.381 
[13]:   test's l2:142.109 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.41 
[18]:   test's l2:141.305 
[19]:   test's l2:141.189 
[20]:   test's l2:140.948 
[21]:   test's l2:140.851 
[22]:   test's l2:140.723 
[23]:   test's l2:140.544 
[24]:   test's l2:140.393 
[25]:   test's l2:140.275 
[26]:   test's l2:140.161 
[27]:   test's l2:140.108 
[28]:   test's l2:139.963 
[29]:   test's l2:139.883 
[30]:   test's l2:139.684 
[31]:   test's l2:139.528 
[32]:   test's l2:139.459 
[33]:   test's l2:139.466 
[34]:   test's l2:139.359 
[35]:   test's l2:139.332 
[36]:   test's l2:139.259 
[37]:   test's l2:139.133 
[38]:   test's l2:139.152 
[39]:   test's l2:139.175 
[40]:   test's l2:139.04 
[41]:   test's l2:139.026 
[42]:   test's l2:138.988 
[43]:   test's l2:138.883 
[44]:   test's l2:138.839 
[45]:   test's l2:138.788 
[46]:   test's l2:138.749 
[47]:   test's l2:138.755 
[48]:   test's l2:138.624 
[49]:   test's l2:138.583 
[50]:   test's l2:138.558 
[51]:   test's l2:138.537 
[52]:   test's l2:138.536 
[53]:   test's l2:138.528 
[54]:   test's l2:138.457 
[55]:   test's l2:138.356 
[56]:   test's l2:138.335 
[57]:   test's l2:138.327 
[58]:   test's l2:138.243 
[59]:   test's l2:138.216 
[60]:   test's l2:138.126 
[61]:   test's l2:138.063 
[62]:   test's l2:138.062 
[63]:   test's l2:138.031 
[64]:   test's l2:137.99 
[65]:   test's l2:137.93 
[66]:   test's l2:137.852 
[67]:   test's l2:137.758 
[68]:   test's l2:137.726 
[69]:   test's l2:137.723 
[70]:   test's l2:137.619 
[71]:   test's l2:137.611 
[72]:   test's l2:137.545 
[73]:   test's l2:137.496 
[74]:   test's l2:137.526 
[75]:   test's l2:137.488 
[76]:   test's l2:137.439 
[77]:   test's l2:137.403 
[78]:   test's l2:137.424 
[79]:   test's l2:137.324 
[80]:   test's l2:137.322 
[81]:   test's l2:137.288 
[82]:   test's l2:137.33 
[83]:   test's l2:137.339 
[84]:   test's l2:137.362 
[85]:   test's l2:137.294 
[86]:   test's l2:137.284 
[87]:   test's l2:137.277 
[88]:   test's l2:137.21 
[89]:   test's l2:137.232 
[90]:   test's l2:137.134 
[91]:   test's l2:137.13 
[92]:   test's l2:137.144 
[93]:   test's l2:137.152 
[94]:   test's l2:137.098 
[95]:   test's l2:137.026 
[96]:   test's l2:137.046 
[97]:   test's l2:136.996 
[98]:   test's l2:137.009 
[99]:   test's l2:136.95 
[100]:  test's l2:136.971 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.040858 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.302 
[2]:    test's l2:147.085 
[3]:    test's l2:145.971 
[4]:    test's l2:145.399 
[5]:    test's l2:144.803 
[6]:    test's l2:144.096 
[7]:    test's l2:143.655 
[8]:    test's l2:143.334 
[9]:    test's l2:143.308 
[10]:   test's l2:143.032 
[11]:   test's l2:142.689 
[12]:   test's l2:142.381 
[13]:   test's l2:142.109 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.41 
[18]:   test's l2:141.305 
[19]:   test's l2:141.189 
[20]:   test's l2:140.948 
[21]:   test's l2:140.851 
[22]:   test's l2:140.723 
[23]:   test's l2:140.544 
[24]:   test's l2:140.393 
[25]:   test's l2:140.275 
[26]:   test's l2:140.161 
[27]:   test's l2:140.108 
[28]:   test's l2:139.963 
[29]:   test's l2:139.883 
[30]:   test's l2:139.684 
[31]:   test's l2:139.528 
[32]:   test's l2:139.459 
[33]:   test's l2:139.466 
[34]:   test's l2:139.359 
[35]:   test's l2:139.332 
[36]:   test's l2:139.259 
[37]:   test's l2:139.133 
[38]:   test's l2:139.152 
[39]:   test's l2:139.175 
[40]:   test's l2:139.04 
[41]:   test's l2:139.026 
[42]:   test's l2:138.988 
[43]:   test's l2:138.883 
[44]:   test's l2:138.839 
[45]:   test's l2:138.788 
[46]:   test's l2:138.749 
[47]:   test's l2:138.755 
[48]:   test's l2:138.624 
[49]:   test's l2:138.583 
[50]:   test's l2:138.558 
[51]:   test's l2:138.537 
[52]:   test's l2:138.536 
[53]:   test's l2:138.528 
[54]:   test's l2:138.457 
[55]:   test's l2:138.356 
[56]:   test's l2:138.335 
[57]:   test's l2:138.327 
[58]:   test's l2:138.243 
[59]:   test's l2:138.216 
[60]:   test's l2:138.126 
[61]:   test's l2:138.063 
[62]:   test's l2:138.062 
[63]:   test's l2:138.031 
[64]:   test's l2:137.99 
[65]:   test's l2:137.93 
[66]:   test's l2:137.852 
[67]:   test's l2:137.758 
[68]:   test's l2:137.726 
[69]:   test's l2:137.723 
[70]:   test's l2:137.619 
[71]:   test's l2:137.611 
[72]:   test's l2:137.545 
[73]:   test's l2:137.496 
[74]:   test's l2:137.526 
[75]:   test's l2:137.488 
[76]:   test's l2:137.439 
[77]:   test's l2:137.403 
[78]:   test's l2:137.424 
[79]:   test's l2:137.324 
[80]:   test's l2:137.322 
[81]:   test's l2:137.288 
[82]:   test's l2:137.33 
[83]:   test's l2:137.339 
[84]:   test's l2:137.362 
[85]:   test's l2:137.294 
[86]:   test's l2:137.284 
[87]:   test's l2:137.277 
[88]:   test's l2:137.21 
[89]:   test's l2:137.232 
[90]:   test's l2:137.134 
[91]:   test's l2:137.13 
[92]:   test's l2:137.144 
[93]:   test's l2:137.152 
[94]:   test's l2:137.098 
[95]:   test's l2:137.026 
[96]:   test's l2:137.046 
[97]:   test's l2:136.996 
[98]:   test's l2:137.009 
[99]:   test's l2:136.95 
[100]:  test's l2:136.971 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.038173 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.302 
[2]:    test's l2:147.085 
[3]:    test's l2:145.971 
[4]:    test's l2:145.399 
[5]:    test's l2:144.802 
[6]:    test's l2:144.096 
[7]:    test's l2:143.655 
[8]:    test's l2:143.334 
[9]:    test's l2:143.308 
[10]:   test's l2:143.032 
[11]:   test's l2:142.689 
[12]:   test's l2:142.381 
[13]:   test's l2:142.109 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.41 
[18]:   test's l2:141.305 
[19]:   test's l2:141.189 
[20]:   test's l2:140.948 
[21]:   test's l2:140.851 
[22]:   test's l2:140.723 
[23]:   test's l2:140.544 
[24]:   test's l2:140.393 
[25]:   test's l2:140.275 
[26]:   test's l2:140.161 
[27]:   test's l2:140.108 
[28]:   test's l2:139.963 
[29]:   test's l2:139.883 
[30]:   test's l2:139.685 
[31]:   test's l2:139.529 
[32]:   test's l2:139.459 
[33]:   test's l2:139.466 
[34]:   test's l2:139.359 
[35]:   test's l2:139.332 
[36]:   test's l2:139.259 
[37]:   test's l2:139.134 
[38]:   test's l2:139.152 
[39]:   test's l2:139.175 
[40]:   test's l2:139.04 
[41]:   test's l2:139.026 
[42]:   test's l2:138.988 
[43]:   test's l2:138.883 
[44]:   test's l2:138.839 
[45]:   test's l2:138.788 
[46]:   test's l2:138.749 
[47]:   test's l2:138.755 
[48]:   test's l2:138.624 
[49]:   test's l2:138.583 
[50]:   test's l2:138.558 
[51]:   test's l2:138.537 
[52]:   test's l2:138.536 
[53]:   test's l2:138.528 
[54]:   test's l2:138.425 
[55]:   test's l2:138.405 
[56]:   test's l2:138.334 
[57]:   test's l2:138.307 
[58]:   test's l2:138.299 
[59]:   test's l2:138.214 
[60]:   test's l2:138.151 
[61]:   test's l2:138.061 
[62]:   test's l2:138.06 
[63]:   test's l2:138.029 
[64]:   test's l2:137.988 
[65]:   test's l2:137.929 
[66]:   test's l2:137.85 
[67]:   test's l2:137.757 
[68]:   test's l2:137.724 
[69]:   test's l2:137.722 
[70]:   test's l2:137.618 
[71]:   test's l2:137.61 
[72]:   test's l2:137.543 
[73]:   test's l2:137.494 
[74]:   test's l2:137.525 
[75]:   test's l2:137.5 
[76]:   test's l2:137.436 
[77]:   test's l2:137.458 
[78]:   test's l2:137.42 
[79]:   test's l2:137.321 
[80]:   test's l2:137.319 
[81]:   test's l2:137.284 
[82]:   test's l2:137.327 
[83]:   test's l2:137.336 
[84]:   test's l2:137.359 
[85]:   test's l2:137.29 
[86]:   test's l2:137.28 
[87]:   test's l2:137.274 
[88]:   test's l2:137.207 
[89]:   test's l2:137.229 
[90]:   test's l2:137.131 
[91]:   test's l2:137.127 
[92]:   test's l2:137.141 
[93]:   test's l2:137.087 
[94]:   test's l2:137.016 
[95]:   test's l2:137.024 
[96]:   test's l2:137.044 
[97]:   test's l2:136.995 
[98]:   test's l2:137.007 
[99]:   test's l2:136.949 
[100]:  test's l2:136.969 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.057746 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.302 
[2]:    test's l2:147.085 
[3]:    test's l2:145.971 
[4]:    test's l2:145.399 
[5]:    test's l2:144.802 
[6]:    test's l2:144.096 
[7]:    test's l2:143.655 
[8]:    test's l2:143.334 
[9]:    test's l2:143.308 
[10]:   test's l2:143.032 
[11]:   test's l2:142.689 
[12]:   test's l2:142.381 
[13]:   test's l2:142.109 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.41 
[18]:   test's l2:141.305 
[19]:   test's l2:141.189 
[20]:   test's l2:140.948 
[21]:   test's l2:140.851 
[22]:   test's l2:140.723 
[23]:   test's l2:140.544 
[24]:   test's l2:140.393 
[25]:   test's l2:140.275 
[26]:   test's l2:140.161 
[27]:   test's l2:140.108 
[28]:   test's l2:139.963 
[29]:   test's l2:139.883 
[30]:   test's l2:139.685 
[31]:   test's l2:139.529 
[32]:   test's l2:139.459 
[33]:   test's l2:139.466 
[34]:   test's l2:139.359 
[35]:   test's l2:139.332 
[36]:   test's l2:139.259 
[37]:   test's l2:139.134 
[38]:   test's l2:139.152 
[39]:   test's l2:139.175 
[40]:   test's l2:139.04 
[41]:   test's l2:139.026 
[42]:   test's l2:138.988 
[43]:   test's l2:138.883 
[44]:   test's l2:138.839 
[45]:   test's l2:138.788 
[46]:   test's l2:138.749 
[47]:   test's l2:138.755 
[48]:   test's l2:138.624 
[49]:   test's l2:138.583 
[50]:   test's l2:138.558 
[51]:   test's l2:138.537 
[52]:   test's l2:138.536 
[53]:   test's l2:138.528 
[54]:   test's l2:138.425 
[55]:   test's l2:138.405 
[56]:   test's l2:138.334 
[57]:   test's l2:138.307 
[58]:   test's l2:138.299 
[59]:   test's l2:138.214 
[60]:   test's l2:138.151 
[61]:   test's l2:138.061 
[62]:   test's l2:138.06 
[63]:   test's l2:138.029 
[64]:   test's l2:137.988 
[65]:   test's l2:137.929 
[66]:   test's l2:137.85 
[67]:   test's l2:137.757 
[68]:   test's l2:137.724 
[69]:   test's l2:137.722 
[70]:   test's l2:137.618 
[71]:   test's l2:137.61 
[72]:   test's l2:137.543 
[73]:   test's l2:137.494 
[74]:   test's l2:137.525 
[75]:   test's l2:137.5 
[76]:   test's l2:137.436 
[77]:   test's l2:137.458 
[78]:   test's l2:137.42 
[79]:   test's l2:137.321 
[80]:   test's l2:137.319 
[81]:   test's l2:137.284 
[82]:   test's l2:137.327 
[83]:   test's l2:137.336 
[84]:   test's l2:137.359 
[85]:   test's l2:137.29 
[86]:   test's l2:137.28 
[87]:   test's l2:137.274 
[88]:   test's l2:137.207 
[89]:   test's l2:137.229 
[90]:   test's l2:137.131 
[91]:   test's l2:137.127 
[92]:   test's l2:137.141 
[93]:   test's l2:137.087 
[94]:   test's l2:137.016 
[95]:   test's l2:137.024 
[96]:   test's l2:137.044 
[97]:   test's l2:136.995 
[98]:   test's l2:137.007 
[99]:   test's l2:136.949 
[100]:  test's l2:136.969 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.060413 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.085 
[3]:    test's l2:145.971 
[4]:    test's l2:145.399 
[5]:    test's l2:144.802 
[6]:    test's l2:144.096 
[7]:    test's l2:143.655 
[8]:    test's l2:143.334 
[9]:    test's l2:143.308 
[10]:   test's l2:143.032 
[11]:   test's l2:142.689 
[12]:   test's l2:142.381 
[13]:   test's l2:142.109 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.41 
[18]:   test's l2:141.305 
[19]:   test's l2:141.189 
[20]:   test's l2:140.948 
[21]:   test's l2:140.851 
[22]:   test's l2:140.723 
[23]:   test's l2:140.544 
[24]:   test's l2:140.393 
[25]:   test's l2:140.275 
[26]:   test's l2:140.161 
[27]:   test's l2:140.108 
[28]:   test's l2:139.963 
[29]:   test's l2:139.883 
[30]:   test's l2:139.685 
[31]:   test's l2:139.529 
[32]:   test's l2:139.459 
[33]:   test's l2:139.466 
[34]:   test's l2:139.359 
[35]:   test's l2:139.332 
[36]:   test's l2:139.259 
[37]:   test's l2:139.134 
[38]:   test's l2:139.153 
[39]:   test's l2:139.175 
[40]:   test's l2:139.146 
[41]:   test's l2:139.063 
[42]:   test's l2:138.928 
[43]:   test's l2:138.823 
[44]:   test's l2:138.827 
[45]:   test's l2:138.785 
[46]:   test's l2:138.734 
[47]:   test's l2:138.697 
[48]:   test's l2:138.565 
[49]:   test's l2:138.525 
[50]:   test's l2:138.499 
[51]:   test's l2:138.478 
[52]:   test's l2:138.469 
[53]:   test's l2:138.364 
[54]:   test's l2:138.343 
[55]:   test's l2:138.342 
[56]:   test's l2:138.314 
[57]:   test's l2:138.244 
[58]:   test's l2:138.236 
[59]:   test's l2:138.151 
[60]:   test's l2:138.088 
[61]:   test's l2:137.991 
[62]:   test's l2:137.98 
[63]:   test's l2:137.887 
[64]:   test's l2:137.816 
[65]:   test's l2:137.755 
[66]:   test's l2:137.674 
[67]:   test's l2:137.58 
[68]:   test's l2:137.55 
[69]:   test's l2:137.511 
[70]:   test's l2:137.514 
[71]:   test's l2:137.474 
[72]:   test's l2:137.475 
[73]:   test's l2:137.436 
[74]:   test's l2:137.388 
[75]:   test's l2:137.353 
[76]:   test's l2:137.375 
[77]:   test's l2:137.326 
[78]:   test's l2:137.357 
[79]:   test's l2:137.26 
[80]:   test's l2:137.195 
[81]:   test's l2:137.139 
[82]:   test's l2:137.137 
[83]:   test's l2:137.101 
[84]:   test's l2:137.143 
[85]:   test's l2:137.136 
[86]:   test's l2:137.086 
[87]:   test's l2:137.091 
[88]:   test's l2:137.014 
[89]:   test's l2:137.035 
[90]:   test's l2:137.042 
[91]:   test's l2:136.976 
[92]:   test's l2:136.921 
[93]:   test's l2:136.931 
[94]:   test's l2:136.873 
[95]:   test's l2:136.827 
[96]:   test's l2:136.777 
[97]:   test's l2:136.774 
[98]:   test's l2:136.725 
[99]:   test's l2:136.746 
[100]:  test's l2:136.692 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing col-wise multi-threading, the overhead of testing was 0.058522 seconds.
You can set `force_col_wise=true` to remove the overhead.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.085 
[3]:    test's l2:145.971 
[4]:    test's l2:145.399 
[5]:    test's l2:144.802 
[6]:    test's l2:144.096 
[7]:    test's l2:143.655 
[8]:    test's l2:143.334 
[9]:    test's l2:143.308 
[10]:   test's l2:143.032 
[11]:   test's l2:142.689 
[12]:   test's l2:142.381 
[13]:   test's l2:142.109 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.41 
[18]:   test's l2:141.305 
[19]:   test's l2:141.189 
[20]:   test's l2:140.948 
[21]:   test's l2:140.851 
[22]:   test's l2:140.723 
[23]:   test's l2:140.544 
[24]:   test's l2:140.393 
[25]:   test's l2:140.275 
[26]:   test's l2:140.161 
[27]:   test's l2:140.108 
[28]:   test's l2:139.963 
[29]:   test's l2:139.883 
[30]:   test's l2:139.685 
[31]:   test's l2:139.529 
[32]:   test's l2:139.459 
[33]:   test's l2:139.466 
[34]:   test's l2:139.359 
[35]:   test's l2:139.332 
[36]:   test's l2:139.259 
[37]:   test's l2:139.134 
[38]:   test's l2:139.153 
[39]:   test's l2:139.175 
[40]:   test's l2:139.146 
[41]:   test's l2:139.063 
[42]:   test's l2:138.928 
[43]:   test's l2:138.823 
[44]:   test's l2:138.827 
[45]:   test's l2:138.785 
[46]:   test's l2:138.734 
[47]:   test's l2:138.697 
[48]:   test's l2:138.565 
[49]:   test's l2:138.525 
[50]:   test's l2:138.499 
[51]:   test's l2:138.478 
[52]:   test's l2:138.469 
[53]:   test's l2:138.364 
[54]:   test's l2:138.343 
[55]:   test's l2:138.342 
[56]:   test's l2:138.314 
[57]:   test's l2:138.244 
[58]:   test's l2:138.236 
[59]:   test's l2:138.151 
[60]:   test's l2:138.088 
[61]:   test's l2:137.991 
[62]:   test's l2:137.98 
[63]:   test's l2:137.887 
[64]:   test's l2:137.816 
[65]:   test's l2:137.755 
[66]:   test's l2:137.674 
[67]:   test's l2:137.58 
[68]:   test's l2:137.55 
[69]:   test's l2:137.511 
[70]:   test's l2:137.514 
[71]:   test's l2:137.474 
[72]:   test's l2:137.475 
[73]:   test's l2:137.436 
[74]:   test's l2:137.388 
[75]:   test's l2:137.353 
[76]:   test's l2:137.375 
[77]:   test's l2:137.326 
[78]:   test's l2:137.357 
[79]:   test's l2:137.26 
[80]:   test's l2:137.195 
[81]:   test's l2:137.139 
[82]:   test's l2:137.137 
[83]:   test's l2:137.101 
[84]:   test's l2:137.143 
[85]:   test's l2:137.136 
[86]:   test's l2:137.086 
[87]:   test's l2:137.091 
[88]:   test's l2:137.014 
[89]:   test's l2:137.035 
[90]:   test's l2:137.042 
[91]:   test's l2:136.976 
[92]:   test's l2:136.921 
[93]:   test's l2:136.931 
[94]:   test's l2:136.873 
[95]:   test's l2:136.827 
[96]:   test's l2:136.777 
[97]:   test's l2:136.774 
[98]:   test's l2:136.725 
[99]:   test's l2:136.746 
[100]:  test's l2:136.692 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing col-wise multi-threading, the overhead of testing was 0.059520 seconds.
You can set `force_col_wise=true` to remove the overhead.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.084 
[3]:    test's l2:145.97 
[4]:    test's l2:145.399 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.655 
[8]:    test's l2:143.333 
[9]:    test's l2:143.308 
[10]:   test's l2:143.032 
[11]:   test's l2:142.689 
[12]:   test's l2:142.381 
[13]:   test's l2:142.109 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.413 
[18]:   test's l2:141.308 
[19]:   test's l2:141.192 
[20]:   test's l2:140.951 
[21]:   test's l2:140.854 
[22]:   test's l2:140.726 
[23]:   test's l2:140.547 
[24]:   test's l2:140.396 
[25]:   test's l2:140.278 
[26]:   test's l2:140.164 
[27]:   test's l2:140.111 
[28]:   test's l2:139.966 
[29]:   test's l2:139.886 
[30]:   test's l2:139.689 
[31]:   test's l2:139.533 
[32]:   test's l2:139.463 
[33]:   test's l2:139.47 
[34]:   test's l2:139.363 
[35]:   test's l2:139.336 
[36]:   test's l2:139.263 
[37]:   test's l2:139.137 
[38]:   test's l2:139.156 
[39]:   test's l2:139.177 
[40]:   test's l2:139.148 
[41]:   test's l2:139.064 
[42]:   test's l2:138.93 
[43]:   test's l2:138.824 
[44]:   test's l2:138.78 
[45]:   test's l2:138.729 
[46]:   test's l2:138.691 
[47]:   test's l2:138.697 
[48]:   test's l2:138.565 
[49]:   test's l2:138.524 
[50]:   test's l2:138.499 
[51]:   test's l2:138.477 
[52]:   test's l2:138.468 
[53]:   test's l2:138.363 
[54]:   test's l2:138.342 
[55]:   test's l2:138.341 
[56]:   test's l2:138.313 
[57]:   test's l2:138.243 
[58]:   test's l2:138.235 
[59]:   test's l2:138.15 
[60]:   test's l2:138.086 
[61]:   test's l2:137.993 
[62]:   test's l2:137.895 
[63]:   test's l2:137.885 
[64]:   test's l2:137.814 
[65]:   test's l2:137.752 
[66]:   test's l2:137.671 
[67]:   test's l2:137.578 
[68]:   test's l2:137.548 
[69]:   test's l2:137.509 
[70]:   test's l2:137.512 
[71]:   test's l2:137.472 
[72]:   test's l2:137.433 
[73]:   test's l2:137.434 
[74]:   test's l2:137.386 
[75]:   test's l2:137.351 
[76]:   test's l2:137.373 
[77]:   test's l2:137.323 
[78]:   test's l2:137.354 
[79]:   test's l2:137.257 
[80]:   test's l2:137.194 
[81]:   test's l2:137.138 
[82]:   test's l2:137.135 
[83]:   test's l2:137.1 
[84]:   test's l2:137.092 
[85]:   test's l2:137.134 
[86]:   test's l2:137.084 
[87]:   test's l2:137.089 
[88]:   test's l2:137.012 
[89]:   test's l2:137.032 
[90]:   test's l2:137.039 
[91]:   test's l2:136.973 
[92]:   test's l2:136.918 
[93]:   test's l2:136.928 
[94]:   test's l2:136.87 
[95]:   test's l2:136.823 
[96]:   test's l2:136.774 
[97]:   test's l2:136.77 
[98]:   test's l2:136.722 
[99]:   test's l2:136.742 
[100]:  test's l2:136.688 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.055647 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.084 
[3]:    test's l2:145.97 
[4]:    test's l2:145.399 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.655 
[8]:    test's l2:143.333 
[9]:    test's l2:143.308 
[10]:   test's l2:143.032 
[11]:   test's l2:142.689 
[12]:   test's l2:142.381 
[13]:   test's l2:142.109 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.413 
[18]:   test's l2:141.308 
[19]:   test's l2:141.192 
[20]:   test's l2:140.951 
[21]:   test's l2:140.854 
[22]:   test's l2:140.726 
[23]:   test's l2:140.547 
[24]:   test's l2:140.396 
[25]:   test's l2:140.278 
[26]:   test's l2:140.164 
[27]:   test's l2:140.111 
[28]:   test's l2:139.966 
[29]:   test's l2:139.886 
[30]:   test's l2:139.689 
[31]:   test's l2:139.533 
[32]:   test's l2:139.463 
[33]:   test's l2:139.47 
[34]:   test's l2:139.363 
[35]:   test's l2:139.336 
[36]:   test's l2:139.263 
[37]:   test's l2:139.137 
[38]:   test's l2:139.156 
[39]:   test's l2:139.177 
[40]:   test's l2:139.148 
[41]:   test's l2:139.064 
[42]:   test's l2:138.93 
[43]:   test's l2:138.824 
[44]:   test's l2:138.78 
[45]:   test's l2:138.729 
[46]:   test's l2:138.691 
[47]:   test's l2:138.697 
[48]:   test's l2:138.565 
[49]:   test's l2:138.524 
[50]:   test's l2:138.499 
[51]:   test's l2:138.477 
[52]:   test's l2:138.468 
[53]:   test's l2:138.363 
[54]:   test's l2:138.342 
[55]:   test's l2:138.341 
[56]:   test's l2:138.313 
[57]:   test's l2:138.243 
[58]:   test's l2:138.235 
[59]:   test's l2:138.15 
[60]:   test's l2:138.086 
[61]:   test's l2:137.993 
[62]:   test's l2:137.895 
[63]:   test's l2:137.885 
[64]:   test's l2:137.814 
[65]:   test's l2:137.752 
[66]:   test's l2:137.671 
[67]:   test's l2:137.578 
[68]:   test's l2:137.548 
[69]:   test's l2:137.509 
[70]:   test's l2:137.512 
[71]:   test's l2:137.472 
[72]:   test's l2:137.433 
[73]:   test's l2:137.434 
[74]:   test's l2:137.386 
[75]:   test's l2:137.351 
[76]:   test's l2:137.373 
[77]:   test's l2:137.323 
[78]:   test's l2:137.354 
[79]:   test's l2:137.257 
[80]:   test's l2:137.194 
[81]:   test's l2:137.138 
[82]:   test's l2:137.135 
[83]:   test's l2:137.1 
[84]:   test's l2:137.092 
[85]:   test's l2:137.134 
[86]:   test's l2:137.084 
[87]:   test's l2:137.089 
[88]:   test's l2:137.012 
[89]:   test's l2:137.032 
[90]:   test's l2:137.039 
[91]:   test's l2:136.973 
[92]:   test's l2:136.918 
[93]:   test's l2:136.928 
[94]:   test's l2:136.87 
[95]:   test's l2:136.823 
[96]:   test's l2:136.774 
[97]:   test's l2:136.77 
[98]:   test's l2:136.722 
[99]:   test's l2:136.742 
[100]:  test's l2:136.688 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing col-wise multi-threading, the overhead of testing was 0.065752 seconds.
You can set `force_col_wise=true` to remove the overhead.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.084 
[3]:    test's l2:145.97 
[4]:    test's l2:145.399 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.655 
[8]:    test's l2:143.333 
[9]:    test's l2:143.308 
[10]:   test's l2:143.031 
[11]:   test's l2:142.689 
[12]:   test's l2:142.381 
[13]:   test's l2:142.109 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.413 
[18]:   test's l2:141.308 
[19]:   test's l2:141.192 
[20]:   test's l2:140.951 
[21]:   test's l2:140.854 
[22]:   test's l2:140.726 
[23]:   test's l2:140.547 
[24]:   test's l2:140.396 
[25]:   test's l2:140.278 
[26]:   test's l2:140.164 
[27]:   test's l2:140.111 
[28]:   test's l2:139.966 
[29]:   test's l2:139.886 
[30]:   test's l2:139.689 
[31]:   test's l2:139.533 
[32]:   test's l2:139.464 
[33]:   test's l2:139.47 
[34]:   test's l2:139.363 
[35]:   test's l2:139.336 
[36]:   test's l2:139.263 
[37]:   test's l2:139.138 
[38]:   test's l2:139.156 
[39]:   test's l2:139.177 
[40]:   test's l2:139.148 
[41]:   test's l2:139.064 
[42]:   test's l2:138.931 
[43]:   test's l2:138.824 
[44]:   test's l2:138.78 
[45]:   test's l2:138.729 
[46]:   test's l2:138.691 
[47]:   test's l2:138.697 
[48]:   test's l2:138.565 
[49]:   test's l2:138.524 
[50]:   test's l2:138.499 
[51]:   test's l2:138.477 
[52]:   test's l2:138.468 
[53]:   test's l2:138.362 
[54]:   test's l2:138.342 
[55]:   test's l2:138.34 
[56]:   test's l2:138.312 
[57]:   test's l2:138.242 
[58]:   test's l2:138.234 
[59]:   test's l2:138.149 
[60]:   test's l2:138.086 
[61]:   test's l2:137.993 
[62]:   test's l2:137.894 
[63]:   test's l2:137.884 
[64]:   test's l2:137.814 
[65]:   test's l2:137.752 
[66]:   test's l2:137.671 
[67]:   test's l2:137.578 
[68]:   test's l2:137.548 
[69]:   test's l2:137.509 
[70]:   test's l2:137.512 
[71]:   test's l2:137.472 
[72]:   test's l2:137.433 
[73]:   test's l2:137.434 
[74]:   test's l2:137.386 
[75]:   test's l2:137.417 
[76]:   test's l2:137.369 
[77]:   test's l2:137.334 
[78]:   test's l2:137.356 
[79]:   test's l2:137.259 
[80]:   test's l2:137.196 
[81]:   test's l2:137.14 
[82]:   test's l2:137.137 
[83]:   test's l2:137.102 
[84]:   test's l2:137.094 
[85]:   test's l2:137.136 
[86]:   test's l2:137.086 
[87]:   test's l2:137.091 
[88]:   test's l2:137.014 
[89]:   test's l2:137.034 
[90]:   test's l2:137.041 
[91]:   test's l2:136.975 
[92]:   test's l2:136.919 
[93]:   test's l2:136.93 
[94]:   test's l2:136.872 
[95]:   test's l2:136.825 
[96]:   test's l2:136.775 
[97]:   test's l2:136.772 
[98]:   test's l2:136.723 
[99]:   test's l2:136.669 
[100]:  test's l2:136.689 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.052158 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.084 
[3]:    test's l2:145.97 
[4]:    test's l2:145.399 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.655 
[8]:    test's l2:143.333 
[9]:    test's l2:143.308 
[10]:   test's l2:143.031 
[11]:   test's l2:142.689 
[12]:   test's l2:142.381 
[13]:   test's l2:142.109 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.413 
[18]:   test's l2:141.308 
[19]:   test's l2:141.192 
[20]:   test's l2:140.951 
[21]:   test's l2:140.854 
[22]:   test's l2:140.726 
[23]:   test's l2:140.547 
[24]:   test's l2:140.396 
[25]:   test's l2:140.278 
[26]:   test's l2:140.164 
[27]:   test's l2:140.111 
[28]:   test's l2:139.966 
[29]:   test's l2:139.886 
[30]:   test's l2:139.689 
[31]:   test's l2:139.533 
[32]:   test's l2:139.464 
[33]:   test's l2:139.47 
[34]:   test's l2:139.363 
[35]:   test's l2:139.336 
[36]:   test's l2:139.263 
[37]:   test's l2:139.138 
[38]:   test's l2:139.156 
[39]:   test's l2:139.177 
[40]:   test's l2:139.148 
[41]:   test's l2:139.064 
[42]:   test's l2:138.931 
[43]:   test's l2:138.824 
[44]:   test's l2:138.78 
[45]:   test's l2:138.729 
[46]:   test's l2:138.691 
[47]:   test's l2:138.697 
[48]:   test's l2:138.565 
[49]:   test's l2:138.524 
[50]:   test's l2:138.499 
[51]:   test's l2:138.477 
[52]:   test's l2:138.468 
[53]:   test's l2:138.362 
[54]:   test's l2:138.342 
[55]:   test's l2:138.34 
[56]:   test's l2:138.312 
[57]:   test's l2:138.242 
[58]:   test's l2:138.234 
[59]:   test's l2:138.149 
[60]:   test's l2:138.086 
[61]:   test's l2:137.993 
[62]:   test's l2:137.894 
[63]:   test's l2:137.884 
[64]:   test's l2:137.814 
[65]:   test's l2:137.752 
[66]:   test's l2:137.671 
[67]:   test's l2:137.578 
[68]:   test's l2:137.548 
[69]:   test's l2:137.509 
[70]:   test's l2:137.512 
[71]:   test's l2:137.472 
[72]:   test's l2:137.433 
[73]:   test's l2:137.434 
[74]:   test's l2:137.386 
[75]:   test's l2:137.417 
[76]:   test's l2:137.369 
[77]:   test's l2:137.334 
[78]:   test's l2:137.356 
[79]:   test's l2:137.259 
[80]:   test's l2:137.196 
[81]:   test's l2:137.14 
[82]:   test's l2:137.137 
[83]:   test's l2:137.102 
[84]:   test's l2:137.094 
[85]:   test's l2:137.136 
[86]:   test's l2:137.086 
[87]:   test's l2:137.091 
[88]:   test's l2:137.014 
[89]:   test's l2:137.034 
[90]:   test's l2:137.041 
[91]:   test's l2:136.975 
[92]:   test's l2:136.919 
[93]:   test's l2:136.93 
[94]:   test's l2:136.872 
[95]:   test's l2:136.825 
[96]:   test's l2:136.775 
[97]:   test's l2:136.772 
[98]:   test's l2:136.723 
[99]:   test's l2:136.669 
[100]:  test's l2:136.689 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.054171 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.084 
[3]:    test's l2:145.97 
[4]:    test's l2:145.399 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.655 
[8]:    test's l2:143.333 
[9]:    test's l2:143.307 
[10]:   test's l2:143.031 
[11]:   test's l2:142.688 
[12]:   test's l2:142.381 
[13]:   test's l2:142.109 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.413 
[18]:   test's l2:141.308 
[19]:   test's l2:141.192 
[20]:   test's l2:140.951 
[21]:   test's l2:140.854 
[22]:   test's l2:140.726 
[23]:   test's l2:140.547 
[24]:   test's l2:140.396 
[25]:   test's l2:140.278 
[26]:   test's l2:140.164 
[27]:   test's l2:140.111 
[28]:   test's l2:139.966 
[29]:   test's l2:139.886 
[30]:   test's l2:139.69 
[31]:   test's l2:139.534 
[32]:   test's l2:139.464 
[33]:   test's l2:139.471 
[34]:   test's l2:139.363 
[35]:   test's l2:139.336 
[36]:   test's l2:139.264 
[37]:   test's l2:139.138 
[38]:   test's l2:139.156 
[39]:   test's l2:139.177 
[40]:   test's l2:139.148 
[41]:   test's l2:139.064 
[42]:   test's l2:138.931 
[43]:   test's l2:138.824 
[44]:   test's l2:138.78 
[45]:   test's l2:138.729 
[46]:   test's l2:138.691 
[47]:   test's l2:138.697 
[48]:   test's l2:138.565 
[49]:   test's l2:138.524 
[50]:   test's l2:138.499 
[51]:   test's l2:138.477 
[52]:   test's l2:138.373 
[53]:   test's l2:138.351 
[54]:   test's l2:138.343 
[55]:   test's l2:138.341 
[56]:   test's l2:138.313 
[57]:   test's l2:138.243 
[58]:   test's l2:138.235 
[59]:   test's l2:138.15 
[60]:   test's l2:138.086 
[61]:   test's l2:137.991 
[62]:   test's l2:137.98 
[63]:   test's l2:137.909 
[64]:   test's l2:137.816 
[65]:   test's l2:137.755 
[66]:   test's l2:137.674 
[67]:   test's l2:137.581 
[68]:   test's l2:137.551 
[69]:   test's l2:137.512 
[70]:   test's l2:137.514 
[71]:   test's l2:137.475 
[72]:   test's l2:137.436 
[73]:   test's l2:137.437 
[74]:   test's l2:137.389 
[75]:   test's l2:137.354 
[76]:   test's l2:137.375 
[77]:   test's l2:137.326 
[78]:   test's l2:137.357 
[79]:   test's l2:137.261 
[80]:   test's l2:137.196 
[81]:   test's l2:137.14 
[82]:   test's l2:137.138 
[83]:   test's l2:137.102 
[84]:   test's l2:137.095 
[85]:   test's l2:137.045 
[86]:   test's l2:137.05 
[87]:   test's l2:137.091 
[88]:   test's l2:137.013 
[89]:   test's l2:137.034 
[90]:   test's l2:137.041 
[91]:   test's l2:136.975 
[92]:   test's l2:136.919 
[93]:   test's l2:136.929 
[94]:   test's l2:136.871 
[95]:   test's l2:136.825 
[96]:   test's l2:136.775 
[97]:   test's l2:136.772 
[98]:   test's l2:136.723 
[99]:   test's l2:136.669 
[100]:  test's l2:136.689 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.065158 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.084 
[3]:    test's l2:145.97 
[4]:    test's l2:145.399 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.655 
[8]:    test's l2:143.333 
[9]:    test's l2:143.307 
[10]:   test's l2:143.031 
[11]:   test's l2:142.688 
[12]:   test's l2:142.381 
[13]:   test's l2:142.109 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.413 
[18]:   test's l2:141.308 
[19]:   test's l2:141.192 
[20]:   test's l2:140.951 
[21]:   test's l2:140.854 
[22]:   test's l2:140.726 
[23]:   test's l2:140.547 
[24]:   test's l2:140.396 
[25]:   test's l2:140.278 
[26]:   test's l2:140.164 
[27]:   test's l2:140.111 
[28]:   test's l2:139.966 
[29]:   test's l2:139.886 
[30]:   test's l2:139.69 
[31]:   test's l2:139.534 
[32]:   test's l2:139.464 
[33]:   test's l2:139.471 
[34]:   test's l2:139.363 
[35]:   test's l2:139.336 
[36]:   test's l2:139.264 
[37]:   test's l2:139.138 
[38]:   test's l2:139.156 
[39]:   test's l2:139.177 
[40]:   test's l2:139.148 
[41]:   test's l2:139.064 
[42]:   test's l2:138.931 
[43]:   test's l2:138.824 
[44]:   test's l2:138.78 
[45]:   test's l2:138.729 
[46]:   test's l2:138.691 
[47]:   test's l2:138.697 
[48]:   test's l2:138.565 
[49]:   test's l2:138.524 
[50]:   test's l2:138.499 
[51]:   test's l2:138.477 
[52]:   test's l2:138.373 
[53]:   test's l2:138.351 
[54]:   test's l2:138.343 
[55]:   test's l2:138.341 
[56]:   test's l2:138.313 
[57]:   test's l2:138.243 
[58]:   test's l2:138.235 
[59]:   test's l2:138.15 
[60]:   test's l2:138.086 
[61]:   test's l2:137.991 
[62]:   test's l2:137.98 
[63]:   test's l2:137.909 
[64]:   test's l2:137.816 
[65]:   test's l2:137.755 
[66]:   test's l2:137.674 
[67]:   test's l2:137.581 
[68]:   test's l2:137.551 
[69]:   test's l2:137.512 
[70]:   test's l2:137.514 
[71]:   test's l2:137.475 
[72]:   test's l2:137.436 
[73]:   test's l2:137.437 
[74]:   test's l2:137.389 
[75]:   test's l2:137.354 
[76]:   test's l2:137.375 
[77]:   test's l2:137.326 
[78]:   test's l2:137.357 
[79]:   test's l2:137.261 
[80]:   test's l2:137.196 
[81]:   test's l2:137.14 
[82]:   test's l2:137.138 
[83]:   test's l2:137.102 
[84]:   test's l2:137.095 
[85]:   test's l2:137.045 
[86]:   test's l2:137.05 
[87]:   test's l2:137.091 
[88]:   test's l2:137.013 
[89]:   test's l2:137.034 
[90]:   test's l2:137.041 
[91]:   test's l2:136.975 
[92]:   test's l2:136.919 
[93]:   test's l2:136.929 
[94]:   test's l2:136.871 
[95]:   test's l2:136.825 
[96]:   test's l2:136.775 
[97]:   test's l2:136.772 
[98]:   test's l2:136.723 
[99]:   test's l2:136.669 
[100]:  test's l2:136.689 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.043199 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.084 
[3]:    test's l2:145.97 
[4]:    test's l2:145.399 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.655 
[8]:    test's l2:143.333 
[9]:    test's l2:143.307 
[10]:   test's l2:143.031 
[11]:   test's l2:142.688 
[12]:   test's l2:142.381 
[13]:   test's l2:142.109 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.413 
[18]:   test's l2:141.308 
[19]:   test's l2:141.192 
[20]:   test's l2:140.951 
[21]:   test's l2:140.854 
[22]:   test's l2:140.726 
[23]:   test's l2:140.547 
[24]:   test's l2:140.396 
[25]:   test's l2:140.279 
[26]:   test's l2:140.164 
[27]:   test's l2:140.111 
[28]:   test's l2:139.966 
[29]:   test's l2:139.886 
[30]:   test's l2:139.69 
[31]:   test's l2:139.534 
[32]:   test's l2:139.465 
[33]:   test's l2:139.471 
[34]:   test's l2:139.364 
[35]:   test's l2:139.336 
[36]:   test's l2:139.264 
[37]:   test's l2:139.138 
[38]:   test's l2:139.156 
[39]:   test's l2:139.177 
[40]:   test's l2:139.148 
[41]:   test's l2:139.064 
[42]:   test's l2:138.931 
[43]:   test's l2:138.824 
[44]:   test's l2:138.829 
[45]:   test's l2:138.787 
[46]:   test's l2:138.736 
[47]:   test's l2:138.698 
[48]:   test's l2:138.566 
[49]:   test's l2:138.526 
[50]:   test's l2:138.501 
[51]:   test's l2:138.478 
[52]:   test's l2:138.374 
[53]:   test's l2:138.353 
[54]:   test's l2:138.344 
[55]:   test's l2:138.315 
[56]:   test's l2:138.314 
[57]:   test's l2:138.244 
[58]:   test's l2:138.236 
[59]:   test's l2:138.151 
[60]:   test's l2:138.087 
[61]:   test's l2:137.991 
[62]:   test's l2:137.979 
[63]:   test's l2:137.909 
[64]:   test's l2:137.816 
[65]:   test's l2:137.755 
[66]:   test's l2:137.674 
[67]:   test's l2:137.581 
[68]:   test's l2:137.551 
[69]:   test's l2:137.512 
[70]:   test's l2:137.514 
[71]:   test's l2:137.475 
[72]:   test's l2:137.475 
[73]:   test's l2:137.436 
[74]:   test's l2:137.388 
[75]:   test's l2:137.353 
[76]:   test's l2:137.375 
[77]:   test's l2:137.326 
[78]:   test's l2:137.357 
[79]:   test's l2:137.26 
[80]:   test's l2:137.196 
[81]:   test's l2:137.14 
[82]:   test's l2:137.137 
[83]:   test's l2:137.102 
[84]:   test's l2:137.094 
[85]:   test's l2:137.044 
[86]:   test's l2:137.049 
[87]:   test's l2:137.09 
[88]:   test's l2:137.013 
[89]:   test's l2:137.033 
[90]:   test's l2:137.04 
[91]:   test's l2:136.974 
[92]:   test's l2:136.918 
[93]:   test's l2:136.928 
[94]:   test's l2:136.87 
[95]:   test's l2:136.824 
[96]:   test's l2:136.774 
[97]:   test's l2:136.771 
[98]:   test's l2:136.716 
[99]:   test's l2:136.721 
[100]:  test's l2:136.68 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing col-wise multi-threading, the overhead of testing was 0.044371 seconds.
You can set `force_col_wise=true` to remove the overhead.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.084 
[3]:    test's l2:145.97 
[4]:    test's l2:145.399 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.655 
[8]:    test's l2:143.333 
[9]:    test's l2:143.307 
[10]:   test's l2:143.031 
[11]:   test's l2:142.688 
[12]:   test's l2:142.381 
[13]:   test's l2:142.109 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.413 
[18]:   test's l2:141.308 
[19]:   test's l2:141.192 
[20]:   test's l2:140.951 
[21]:   test's l2:140.854 
[22]:   test's l2:140.726 
[23]:   test's l2:140.547 
[24]:   test's l2:140.396 
[25]:   test's l2:140.279 
[26]:   test's l2:140.164 
[27]:   test's l2:140.111 
[28]:   test's l2:139.966 
[29]:   test's l2:139.886 
[30]:   test's l2:139.69 
[31]:   test's l2:139.534 
[32]:   test's l2:139.465 
[33]:   test's l2:139.471 
[34]:   test's l2:139.364 
[35]:   test's l2:139.336 
[36]:   test's l2:139.264 
[37]:   test's l2:139.138 
[38]:   test's l2:139.156 
[39]:   test's l2:139.177 
[40]:   test's l2:139.148 
[41]:   test's l2:139.064 
[42]:   test's l2:138.931 
[43]:   test's l2:138.824 
[44]:   test's l2:138.829 
[45]:   test's l2:138.787 
[46]:   test's l2:138.736 
[47]:   test's l2:138.698 
[48]:   test's l2:138.566 
[49]:   test's l2:138.526 
[50]:   test's l2:138.501 
[51]:   test's l2:138.478 
[52]:   test's l2:138.374 
[53]:   test's l2:138.353 
[54]:   test's l2:138.344 
[55]:   test's l2:138.315 
[56]:   test's l2:138.314 
[57]:   test's l2:138.244 
[58]:   test's l2:138.236 
[59]:   test's l2:138.151 
[60]:   test's l2:138.087 
[61]:   test's l2:137.991 
[62]:   test's l2:137.979 
[63]:   test's l2:137.909 
[64]:   test's l2:137.816 
[65]:   test's l2:137.755 
[66]:   test's l2:137.674 
[67]:   test's l2:137.581 
[68]:   test's l2:137.551 
[69]:   test's l2:137.512 
[70]:   test's l2:137.514 
[71]:   test's l2:137.475 
[72]:   test's l2:137.475 
[73]:   test's l2:137.436 
[74]:   test's l2:137.388 
[75]:   test's l2:137.353 
[76]:   test's l2:137.375 
[77]:   test's l2:137.326 
[78]:   test's l2:137.357 
[79]:   test's l2:137.26 
[80]:   test's l2:137.196 
[81]:   test's l2:137.14 
[82]:   test's l2:137.137 
[83]:   test's l2:137.102 
[84]:   test's l2:137.094 
[85]:   test's l2:137.044 
[86]:   test's l2:137.049 
[87]:   test's l2:137.09 
[88]:   test's l2:137.013 
[89]:   test's l2:137.033 
[90]:   test's l2:137.04 
[91]:   test's l2:136.974 
[92]:   test's l2:136.918 
[93]:   test's l2:136.928 
[94]:   test's l2:136.87 
[95]:   test's l2:136.824 
[96]:   test's l2:136.774 
[97]:   test's l2:136.771 
[98]:   test's l2:136.716 
[99]:   test's l2:136.721 
[100]:  test's l2:136.68 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.062597 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.084 
[3]:    test's l2:145.97 
[4]:    test's l2:145.399 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.655 
[8]:    test's l2:143.333 
[9]:    test's l2:143.307 
[10]:   test's l2:143.031 
[11]:   test's l2:142.688 
[12]:   test's l2:142.381 
[13]:   test's l2:142.109 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.413 
[18]:   test's l2:141.308 
[19]:   test's l2:141.192 
[20]:   test's l2:140.951 
[21]:   test's l2:140.854 
[22]:   test's l2:140.726 
[23]:   test's l2:140.547 
[24]:   test's l2:140.396 
[25]:   test's l2:140.279 
[26]:   test's l2:140.164 
[27]:   test's l2:140.112 
[28]:   test's l2:139.966 
[29]:   test's l2:139.886 
[30]:   test's l2:139.69 
[31]:   test's l2:139.535 
[32]:   test's l2:139.465 
[33]:   test's l2:139.471 
[34]:   test's l2:139.364 
[35]:   test's l2:139.336 
[36]:   test's l2:139.217 
[37]:   test's l2:139.14 
[38]:   test's l2:139.158 
[39]:   test's l2:139.179 
[40]:   test's l2:139.149 
[41]:   test's l2:139.066 
[42]:   test's l2:138.932 
[43]:   test's l2:138.826 
[44]:   test's l2:138.83 
[45]:   test's l2:138.788 
[46]:   test's l2:138.737 
[47]:   test's l2:138.7 
[48]:   test's l2:138.568 
[49]:   test's l2:138.528 
[50]:   test's l2:138.502 
[51]:   test's l2:138.48 
[52]:   test's l2:138.375 
[53]:   test's l2:138.354 
[54]:   test's l2:138.345 
[55]:   test's l2:138.317 
[56]:   test's l2:138.316 
[57]:   test's l2:138.246 
[58]:   test's l2:138.237 
[59]:   test's l2:138.152 
[60]:   test's l2:138.088 
[61]:   test's l2:137.991 
[62]:   test's l2:137.98 
[63]:   test's l2:137.91 
[64]:   test's l2:137.817 
[65]:   test's l2:137.755 
[66]:   test's l2:137.675 
[67]:   test's l2:137.582 
[68]:   test's l2:137.552 
[69]:   test's l2:137.513 
[70]:   test's l2:137.515 
[71]:   test's l2:137.475 
[72]:   test's l2:137.475 
[73]:   test's l2:137.451 
[74]:   test's l2:137.431 
[75]:   test's l2:137.383 
[76]:   test's l2:137.413 
[77]:   test's l2:137.375 
[78]:   test's l2:137.394 
[79]:   test's l2:137.3 
[80]:   test's l2:137.307 
[81]:   test's l2:137.331 
[82]:   test's l2:137.328 
[83]:   test's l2:137.294 
[84]:   test's l2:137.335 
[85]:   test's l2:137.26 
[86]:   test's l2:137.192 
[87]:   test's l2:137.182 
[88]:   test's l2:137.116 
[89]:   test's l2:137.135 
[90]:   test's l2:137.13 
[91]:   test's l2:137.124 
[92]:   test's l2:137.136 
[93]:   test's l2:137.148 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.057098 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.084 
[3]:    test's l2:145.97 
[4]:    test's l2:145.399 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.655 
[8]:    test's l2:143.333 
[9]:    test's l2:143.307 
[10]:   test's l2:143.031 
[11]:   test's l2:142.688 
[12]:   test's l2:142.381 
[13]:   test's l2:142.109 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.413 
[18]:   test's l2:141.308 
[19]:   test's l2:141.192 
[20]:   test's l2:140.951 
[21]:   test's l2:140.854 
[22]:   test's l2:140.726 
[23]:   test's l2:140.547 
[24]:   test's l2:140.396 
[25]:   test's l2:140.279 
[26]:   test's l2:140.164 
[27]:   test's l2:140.112 
[28]:   test's l2:139.966 
[29]:   test's l2:139.886 
[30]:   test's l2:139.69 
[31]:   test's l2:139.535 
[32]:   test's l2:139.465 
[33]:   test's l2:139.471 
[34]:   test's l2:139.364 
[35]:   test's l2:139.336 
[36]:   test's l2:139.217 
[37]:   test's l2:139.14 
[38]:   test's l2:139.158 
[39]:   test's l2:139.179 
[40]:   test's l2:139.149 
[41]:   test's l2:139.066 
[42]:   test's l2:138.932 
[43]:   test's l2:138.826 
[44]:   test's l2:138.83 
[45]:   test's l2:138.788 
[46]:   test's l2:138.737 
[47]:   test's l2:138.7 
[48]:   test's l2:138.568 
[49]:   test's l2:138.528 
[50]:   test's l2:138.502 
[51]:   test's l2:138.48 
[52]:   test's l2:138.375 
[53]:   test's l2:138.354 
[54]:   test's l2:138.345 
[55]:   test's l2:138.317 
[56]:   test's l2:138.316 
[57]:   test's l2:138.246 
[58]:   test's l2:138.237 
[59]:   test's l2:138.152 
[60]:   test's l2:138.088 
[61]:   test's l2:137.991 
[62]:   test's l2:137.98 
[63]:   test's l2:137.91 
[64]:   test's l2:137.817 
[65]:   test's l2:137.755 
[66]:   test's l2:137.675 
[67]:   test's l2:137.582 
[68]:   test's l2:137.552 
[69]:   test's l2:137.513 
[70]:   test's l2:137.515 
[71]:   test's l2:137.475 
[72]:   test's l2:137.475 
[73]:   test's l2:137.451 
[74]:   test's l2:137.431 
[75]:   test's l2:137.383 
[76]:   test's l2:137.413 
[77]:   test's l2:137.375 
[78]:   test's l2:137.394 
[79]:   test's l2:137.3 
[80]:   test's l2:137.307 
[81]:   test's l2:137.331 
[82]:   test's l2:137.328 
[83]:   test's l2:137.294 
[84]:   test's l2:137.335 
[85]:   test's l2:137.26 
[86]:   test's l2:137.192 
[87]:   test's l2:137.182 
[88]:   test's l2:137.116 
[89]:   test's l2:137.135 
[90]:   test's l2:137.13 
[91]:   test's l2:137.124 
[92]:   test's l2:137.136 
[93]:   test's l2:137.148 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.063447 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.084 
[3]:    test's l2:145.97 
[4]:    test's l2:145.398 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.654 
[8]:    test's l2:143.333 
[9]:    test's l2:143.307 
[10]:   test's l2:143.031 
[11]:   test's l2:142.688 
[12]:   test's l2:142.381 
[13]:   test's l2:142.11 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.416 
[18]:   test's l2:141.311 
[19]:   test's l2:141.195 
[20]:   test's l2:140.954 
[21]:   test's l2:140.857 
[22]:   test's l2:140.729 
[23]:   test's l2:140.55 
[24]:   test's l2:140.399 
[25]:   test's l2:140.282 
[26]:   test's l2:140.167 
[27]:   test's l2:140.115 
[28]:   test's l2:139.969 
[29]:   test's l2:139.889 
[30]:   test's l2:139.694 
[31]:   test's l2:139.539 
[32]:   test's l2:139.469 
[33]:   test's l2:139.475 
[34]:   test's l2:139.368 
[35]:   test's l2:139.34 
[36]:   test's l2:139.221 
[37]:   test's l2:139.144 
[38]:   test's l2:139.162 
[39]:   test's l2:139.132 
[40]:   test's l2:139.049 
[41]:   test's l2:139.068 
[42]:   test's l2:138.935 
[43]:   test's l2:138.827 
[44]:   test's l2:138.783 
[45]:   test's l2:138.732 
[46]:   test's l2:138.694 
[47]:   test's l2:138.7 
[48]:   test's l2:138.568 
[49]:   test's l2:138.528 
[50]:   test's l2:138.502 
[51]:   test's l2:138.479 
[52]:   test's l2:138.375 
[53]:   test's l2:138.353 
[54]:   test's l2:138.344 
[55]:   test's l2:138.316 
[56]:   test's l2:138.315 
[57]:   test's l2:138.245 
[58]:   test's l2:138.236 
[59]:   test's l2:138.151 
[60]:   test's l2:138.087 
[61]:   test's l2:137.991 
[62]:   test's l2:137.98 
[63]:   test's l2:137.909 
[64]:   test's l2:137.817 
[65]:   test's l2:137.755 
[66]:   test's l2:137.675 
[67]:   test's l2:137.644 
[68]:   test's l2:137.606 
[69]:   test's l2:137.514 
[70]:   test's l2:137.515 
[71]:   test's l2:137.476 
[72]:   test's l2:137.437 
[73]:   test's l2:137.389 
[74]:   test's l2:137.354 
[75]:   test's l2:137.293 
[76]:   test's l2:137.188 
[77]:   test's l2:137.134 
[78]:   test's l2:137.165 
[79]:   test's l2:137.186 
[80]:   test's l2:137.129 
[81]:   test's l2:137.122 
[82]:   test's l2:137.119 
[83]:   test's l2:137.084 
[84]:   test's l2:137.034 
[85]:   test's l2:137.039 
[86]:   test's l2:137.079 
[87]:   test's l2:137.089 
[88]:   test's l2:137.031 
[89]:   test's l2:136.953 
[90]:   test's l2:136.972 
[91]:   test's l2:136.978 
[92]:   test's l2:136.944 
[93]:   test's l2:136.946 
[94]:   test's l2:136.893 
[95]:   test's l2:136.825 
[96]:   test's l2:136.814 
[97]:   test's l2:136.766 
[98]:   test's l2:136.763 
[99]:   test's l2:136.707 
[100]:  test's l2:136.725 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.050719 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.084 
[3]:    test's l2:145.97 
[4]:    test's l2:145.398 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.654 
[8]:    test's l2:143.333 
[9]:    test's l2:143.307 
[10]:   test's l2:143.031 
[11]:   test's l2:142.688 
[12]:   test's l2:142.381 
[13]:   test's l2:142.11 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.416 
[18]:   test's l2:141.311 
[19]:   test's l2:141.195 
[20]:   test's l2:140.954 
[21]:   test's l2:140.857 
[22]:   test's l2:140.729 
[23]:   test's l2:140.55 
[24]:   test's l2:140.399 
[25]:   test's l2:140.282 
[26]:   test's l2:140.167 
[27]:   test's l2:140.115 
[28]:   test's l2:139.969 
[29]:   test's l2:139.889 
[30]:   test's l2:139.694 
[31]:   test's l2:139.539 
[32]:   test's l2:139.469 
[33]:   test's l2:139.475 
[34]:   test's l2:139.368 
[35]:   test's l2:139.34 
[36]:   test's l2:139.221 
[37]:   test's l2:139.144 
[38]:   test's l2:139.162 
[39]:   test's l2:139.132 
[40]:   test's l2:139.049 
[41]:   test's l2:139.068 
[42]:   test's l2:138.935 
[43]:   test's l2:138.827 
[44]:   test's l2:138.783 
[45]:   test's l2:138.732 
[46]:   test's l2:138.694 
[47]:   test's l2:138.7 
[48]:   test's l2:138.568 
[49]:   test's l2:138.528 
[50]:   test's l2:138.502 
[51]:   test's l2:138.479 
[52]:   test's l2:138.375 
[53]:   test's l2:138.353 
[54]:   test's l2:138.344 
[55]:   test's l2:138.316 
[56]:   test's l2:138.315 
[57]:   test's l2:138.245 
[58]:   test's l2:138.236 
[59]:   test's l2:138.151 
[60]:   test's l2:138.087 
[61]:   test's l2:137.991 
[62]:   test's l2:137.98 
[63]:   test's l2:137.909 
[64]:   test's l2:137.817 
[65]:   test's l2:137.755 
[66]:   test's l2:137.675 
[67]:   test's l2:137.644 
[68]:   test's l2:137.606 
[69]:   test's l2:137.514 
[70]:   test's l2:137.515 
[71]:   test's l2:137.476 
[72]:   test's l2:137.437 
[73]:   test's l2:137.389 
[74]:   test's l2:137.354 
[75]:   test's l2:137.293 
[76]:   test's l2:137.188 
[77]:   test's l2:137.134 
[78]:   test's l2:137.165 
[79]:   test's l2:137.186 
[80]:   test's l2:137.129 
[81]:   test's l2:137.122 
[82]:   test's l2:137.119 
[83]:   test's l2:137.084 
[84]:   test's l2:137.034 
[85]:   test's l2:137.039 
[86]:   test's l2:137.079 
[87]:   test's l2:137.089 
[88]:   test's l2:137.031 
[89]:   test's l2:136.953 
[90]:   test's l2:136.972 
[91]:   test's l2:136.978 
[92]:   test's l2:136.944 
[93]:   test's l2:136.946 
[94]:   test's l2:136.893 
[95]:   test's l2:136.825 
[96]:   test's l2:136.814 
[97]:   test's l2:136.766 
[98]:   test's l2:136.763 
[99]:   test's l2:136.707 
[100]:  test's l2:136.725 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.058594 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.084 
[3]:    test's l2:145.97 
[4]:    test's l2:145.398 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.654 
[8]:    test's l2:143.333 
[9]:    test's l2:143.307 
[10]:   test's l2:143.031 
[11]:   test's l2:142.688 
[12]:   test's l2:142.381 
[13]:   test's l2:142.11 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.416 
[18]:   test's l2:141.311 
[19]:   test's l2:141.195 
[20]:   test's l2:140.954 
[21]:   test's l2:140.857 
[22]:   test's l2:140.729 
[23]:   test's l2:140.55 
[24]:   test's l2:140.399 
[25]:   test's l2:140.282 
[26]:   test's l2:140.168 
[27]:   test's l2:140.115 
[28]:   test's l2:139.969 
[29]:   test's l2:139.889 
[30]:   test's l2:139.695 
[31]:   test's l2:139.539 
[32]:   test's l2:139.47 
[33]:   test's l2:139.476 
[34]:   test's l2:139.368 
[35]:   test's l2:139.34 
[36]:   test's l2:139.221 
[37]:   test's l2:139.144 
[38]:   test's l2:139.162 
[39]:   test's l2:139.132 
[40]:   test's l2:139.049 
[41]:   test's l2:139.068 
[42]:   test's l2:138.935 
[43]:   test's l2:138.827 
[44]:   test's l2:138.783 
[45]:   test's l2:138.732 
[46]:   test's l2:138.694 
[47]:   test's l2:138.7 
[48]:   test's l2:138.569 
[49]:   test's l2:138.528 
[50]:   test's l2:138.502 
[51]:   test's l2:138.479 
[52]:   test's l2:138.375 
[53]:   test's l2:138.353 
[54]:   test's l2:138.344 
[55]:   test's l2:138.315 
[56]:   test's l2:138.314 
[57]:   test's l2:138.244 
[58]:   test's l2:138.236 
[59]:   test's l2:138.151 
[60]:   test's l2:138.087 
[61]:   test's l2:137.99 
[62]:   test's l2:137.979 
[63]:   test's l2:137.909 
[64]:   test's l2:137.817 
[65]:   test's l2:137.755 
[66]:   test's l2:137.674 
[67]:   test's l2:137.644 
[68]:   test's l2:137.606 
[69]:   test's l2:137.514 
[70]:   test's l2:137.515 
[71]:   test's l2:137.476 
[72]:   test's l2:137.451 
[73]:   test's l2:137.431 
[74]:   test's l2:137.384 
[75]:   test's l2:137.414 
[76]:   test's l2:137.376 
[77]:   test's l2:137.375 
[78]:   test's l2:137.395 
[79]:   test's l2:137.3 
[80]:   test's l2:137.308 
[81]:   test's l2:137.331 
[82]:   test's l2:137.328 
[83]:   test's l2:137.294 
[84]:   test's l2:137.335 
[85]:   test's l2:137.259 
[86]:   test's l2:137.192 
[87]:   test's l2:137.182 
[88]:   test's l2:137.115 
[89]:   test's l2:137.135 
[90]:   test's l2:137.129 
[91]:   test's l2:137.124 
[92]:   test's l2:137.135 
[93]:   test's l2:137.146 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.048571 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.084 
[3]:    test's l2:145.97 
[4]:    test's l2:145.398 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.654 
[8]:    test's l2:143.333 
[9]:    test's l2:143.307 
[10]:   test's l2:143.031 
[11]:   test's l2:142.688 
[12]:   test's l2:142.381 
[13]:   test's l2:142.11 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.416 
[18]:   test's l2:141.311 
[19]:   test's l2:141.195 
[20]:   test's l2:140.954 
[21]:   test's l2:140.857 
[22]:   test's l2:140.729 
[23]:   test's l2:140.55 
[24]:   test's l2:140.399 
[25]:   test's l2:140.282 
[26]:   test's l2:140.168 
[27]:   test's l2:140.115 
[28]:   test's l2:139.969 
[29]:   test's l2:139.889 
[30]:   test's l2:139.695 
[31]:   test's l2:139.539 
[32]:   test's l2:139.47 
[33]:   test's l2:139.476 
[34]:   test's l2:139.368 
[35]:   test's l2:139.34 
[36]:   test's l2:139.221 
[37]:   test's l2:139.144 
[38]:   test's l2:139.162 
[39]:   test's l2:139.132 
[40]:   test's l2:139.049 
[41]:   test's l2:139.068 
[42]:   test's l2:138.935 
[43]:   test's l2:138.827 
[44]:   test's l2:138.783 
[45]:   test's l2:138.732 
[46]:   test's l2:138.694 
[47]:   test's l2:138.7 
[48]:   test's l2:138.569 
[49]:   test's l2:138.528 
[50]:   test's l2:138.502 
[51]:   test's l2:138.479 
[52]:   test's l2:138.375 
[53]:   test's l2:138.353 
[54]:   test's l2:138.344 
[55]:   test's l2:138.315 
[56]:   test's l2:138.314 
[57]:   test's l2:138.244 
[58]:   test's l2:138.236 
[59]:   test's l2:138.151 
[60]:   test's l2:138.087 
[61]:   test's l2:137.99 
[62]:   test's l2:137.979 
[63]:   test's l2:137.909 
[64]:   test's l2:137.817 
[65]:   test's l2:137.755 
[66]:   test's l2:137.674 
[67]:   test's l2:137.644 
[68]:   test's l2:137.606 
[69]:   test's l2:137.514 
[70]:   test's l2:137.515 
[71]:   test's l2:137.476 
[72]:   test's l2:137.451 
[73]:   test's l2:137.431 
[74]:   test's l2:137.384 
[75]:   test's l2:137.414 
[76]:   test's l2:137.376 
[77]:   test's l2:137.375 
[78]:   test's l2:137.395 
[79]:   test's l2:137.3 
[80]:   test's l2:137.308 
[81]:   test's l2:137.331 
[82]:   test's l2:137.328 
[83]:   test's l2:137.294 
[84]:   test's l2:137.335 
[85]:   test's l2:137.259 
[86]:   test's l2:137.192 
[87]:   test's l2:137.182 
[88]:   test's l2:137.115 
[89]:   test's l2:137.135 
[90]:   test's l2:137.129 
[91]:   test's l2:137.124 
[92]:   test's l2:137.135 
[93]:   test's l2:137.146 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.044882 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.084 
[3]:    test's l2:145.97 
[4]:    test's l2:145.398 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.654 
[8]:    test's l2:143.333 
[9]:    test's l2:143.307 
[10]:   test's l2:143.03 
[11]:   test's l2:142.687 
[12]:   test's l2:142.381 
[13]:   test's l2:142.11 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.416 
[18]:   test's l2:141.312 
[19]:   test's l2:141.195 
[20]:   test's l2:140.954 
[21]:   test's l2:140.857 
[22]:   test's l2:140.729 
[23]:   test's l2:140.55 
[24]:   test's l2:140.4 
[25]:   test's l2:140.282 
[26]:   test's l2:140.168 
[27]:   test's l2:140.115 
[28]:   test's l2:139.969 
[29]:   test's l2:139.89 
[30]:   test's l2:139.695 
[31]:   test's l2:139.54 
[32]:   test's l2:139.47 
[33]:   test's l2:139.476 
[34]:   test's l2:139.369 
[35]:   test's l2:139.341 
[36]:   test's l2:139.221 
[37]:   test's l2:139.145 
[38]:   test's l2:139.162 
[39]:   test's l2:139.132 
[40]:   test's l2:139.049 
[41]:   test's l2:139.068 
[42]:   test's l2:138.936 
[43]:   test's l2:138.828 
[44]:   test's l2:138.784 
[45]:   test's l2:138.732 
[46]:   test's l2:138.694 
[47]:   test's l2:138.7 
[48]:   test's l2:138.569 
[49]:   test's l2:138.528 
[50]:   test's l2:138.502 
[51]:   test's l2:138.479 
[52]:   test's l2:138.375 
[53]:   test's l2:138.353 
[54]:   test's l2:138.344 
[55]:   test's l2:138.315 
[56]:   test's l2:138.314 
[57]:   test's l2:138.244 
[58]:   test's l2:138.236 
[59]:   test's l2:138.151 
[60]:   test's l2:138.086 
[61]:   test's l2:137.99 
[62]:   test's l2:137.979 
[63]:   test's l2:137.909 
[64]:   test's l2:137.817 
[65]:   test's l2:137.755 
[66]:   test's l2:137.674 
[67]:   test's l2:137.644 
[68]:   test's l2:137.605 
[69]:   test's l2:137.514 
[70]:   test's l2:137.515 
[71]:   test's l2:137.476 
[72]:   test's l2:137.451 
[73]:   test's l2:137.431 
[74]:   test's l2:137.384 
[75]:   test's l2:137.414 
[76]:   test's l2:137.376 
[77]:   test's l2:137.375 
[78]:   test's l2:137.395 
[79]:   test's l2:137.3 
[80]:   test's l2:137.308 
[81]:   test's l2:137.331 
[82]:   test's l2:137.328 
[83]:   test's l2:137.294 
[84]:   test's l2:137.334 
[85]:   test's l2:137.259 
[86]:   test's l2:137.191 
[87]:   test's l2:137.181 
[88]:   test's l2:137.115 
[89]:   test's l2:137.134 
[90]:   test's l2:137.129 
[91]:   test's l2:137.123 
[92]:   test's l2:137.134 
[93]:   test's l2:137.145 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing col-wise multi-threading, the overhead of testing was 0.049022 seconds.
You can set `force_col_wise=true` to remove the overhead.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.084 
[3]:    test's l2:145.97 
[4]:    test's l2:145.398 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.654 
[8]:    test's l2:143.333 
[9]:    test's l2:143.307 
[10]:   test's l2:143.03 
[11]:   test's l2:142.687 
[12]:   test's l2:142.381 
[13]:   test's l2:142.11 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.416 
[18]:   test's l2:141.312 
[19]:   test's l2:141.195 
[20]:   test's l2:140.954 
[21]:   test's l2:140.857 
[22]:   test's l2:140.729 
[23]:   test's l2:140.55 
[24]:   test's l2:140.4 
[25]:   test's l2:140.282 
[26]:   test's l2:140.168 
[27]:   test's l2:140.115 
[28]:   test's l2:139.969 
[29]:   test's l2:139.89 
[30]:   test's l2:139.695 
[31]:   test's l2:139.54 
[32]:   test's l2:139.47 
[33]:   test's l2:139.476 
[34]:   test's l2:139.369 
[35]:   test's l2:139.341 
[36]:   test's l2:139.221 
[37]:   test's l2:139.145 
[38]:   test's l2:139.162 
[39]:   test's l2:139.132 
[40]:   test's l2:139.049 
[41]:   test's l2:139.068 
[42]:   test's l2:138.936 
[43]:   test's l2:138.828 
[44]:   test's l2:138.784 
[45]:   test's l2:138.732 
[46]:   test's l2:138.694 
[47]:   test's l2:138.7 
[48]:   test's l2:138.569 
[49]:   test's l2:138.528 
[50]:   test's l2:138.502 
[51]:   test's l2:138.479 
[52]:   test's l2:138.375 
[53]:   test's l2:138.353 
[54]:   test's l2:138.344 
[55]:   test's l2:138.315 
[56]:   test's l2:138.314 
[57]:   test's l2:138.244 
[58]:   test's l2:138.236 
[59]:   test's l2:138.151 
[60]:   test's l2:138.086 
[61]:   test's l2:137.99 
[62]:   test's l2:137.979 
[63]:   test's l2:137.909 
[64]:   test's l2:137.817 
[65]:   test's l2:137.755 
[66]:   test's l2:137.674 
[67]:   test's l2:137.644 
[68]:   test's l2:137.605 
[69]:   test's l2:137.514 
[70]:   test's l2:137.515 
[71]:   test's l2:137.476 
[72]:   test's l2:137.451 
[73]:   test's l2:137.431 
[74]:   test's l2:137.384 
[75]:   test's l2:137.414 
[76]:   test's l2:137.376 
[77]:   test's l2:137.375 
[78]:   test's l2:137.395 
[79]:   test's l2:137.3 
[80]:   test's l2:137.308 
[81]:   test's l2:137.331 
[82]:   test's l2:137.328 
[83]:   test's l2:137.294 
[84]:   test's l2:137.334 
[85]:   test's l2:137.259 
[86]:   test's l2:137.191 
[87]:   test's l2:137.181 
[88]:   test's l2:137.115 
[89]:   test's l2:137.134 
[90]:   test's l2:137.129 
[91]:   test's l2:137.123 
[92]:   test's l2:137.134 
[93]:   test's l2:137.145 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.034900 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.084 
[3]:    test's l2:145.97 
[4]:    test's l2:145.398 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.654 
[8]:    test's l2:143.333 
[9]:    test's l2:143.306 
[10]:   test's l2:143.03 
[11]:   test's l2:142.687 
[12]:   test's l2:142.381 
[13]:   test's l2:142.11 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.416 
[18]:   test's l2:141.312 
[19]:   test's l2:141.195 
[20]:   test's l2:140.955 
[21]:   test's l2:140.857 
[22]:   test's l2:140.729 
[23]:   test's l2:140.551 
[24]:   test's l2:140.4 
[25]:   test's l2:140.282 
[26]:   test's l2:140.168 
[27]:   test's l2:140.115 
[28]:   test's l2:139.97 
[29]:   test's l2:139.89 
[30]:   test's l2:139.696 
[31]:   test's l2:139.54 
[32]:   test's l2:139.471 
[33]:   test's l2:139.476 
[34]:   test's l2:139.369 
[35]:   test's l2:139.341 
[36]:   test's l2:139.222 
[37]:   test's l2:139.145 
[38]:   test's l2:139.162 
[39]:   test's l2:139.133 
[40]:   test's l2:139.05 
[41]:   test's l2:139.069 
[42]:   test's l2:138.936 
[43]:   test's l2:138.828 
[44]:   test's l2:138.833 
[45]:   test's l2:138.79 
[46]:   test's l2:138.739 
[47]:   test's l2:138.702 
[48]:   test's l2:138.57 
[49]:   test's l2:138.53 
[50]:   test's l2:138.504 
[51]:   test's l2:138.481 
[52]:   test's l2:138.376 
[53]:   test's l2:138.354 
[54]:   test's l2:138.345 
[55]:   test's l2:138.316 
[56]:   test's l2:138.315 
[57]:   test's l2:138.245 
[58]:   test's l2:138.236 
[59]:   test's l2:138.152 
[60]:   test's l2:138.087 
[61]:   test's l2:137.99 
[62]:   test's l2:137.979 
[63]:   test's l2:137.909 
[64]:   test's l2:137.816 
[65]:   test's l2:137.755 
[66]:   test's l2:137.674 
[67]:   test's l2:137.644 
[68]:   test's l2:137.605 
[69]:   test's l2:137.607 
[70]:   test's l2:137.568 
[71]:   test's l2:137.476 
[72]:   test's l2:137.451 
[73]:   test's l2:137.431 
[74]:   test's l2:137.383 
[75]:   test's l2:137.414 
[76]:   test's l2:137.413 
[77]:   test's l2:137.375 
[78]:   test's l2:137.394 
[79]:   test's l2:137.3 
[80]:   test's l2:137.308 
[81]:   test's l2:137.331 
[82]:   test's l2:137.328 
[83]:   test's l2:137.294 
[84]:   test's l2:137.334 
[85]:   test's l2:137.258 
[86]:   test's l2:137.191 
[87]:   test's l2:137.181 
[88]:   test's l2:137.115 
[89]:   test's l2:137.134 
[90]:   test's l2:137.128 
[91]:   test's l2:137.123 
[92]:   test's l2:137.134 
[93]:   test's l2:137.078 
[94]:   test's l2:137.024 
[95]:   test's l2:136.953 
[96]:   test's l2:136.96 
[97]:   test's l2:136.97 
[98]:   test's l2:136.933 
[99]:   test's l2:136.95 
[100]:  test's l2:136.928 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.033482 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.084 
[3]:    test's l2:145.97 
[4]:    test's l2:145.398 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.654 
[8]:    test's l2:143.333 
[9]:    test's l2:143.306 
[10]:   test's l2:143.03 
[11]:   test's l2:142.687 
[12]:   test's l2:142.381 
[13]:   test's l2:142.11 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.416 
[18]:   test's l2:141.312 
[19]:   test's l2:141.195 
[20]:   test's l2:140.955 
[21]:   test's l2:140.857 
[22]:   test's l2:140.729 
[23]:   test's l2:140.551 
[24]:   test's l2:140.4 
[25]:   test's l2:140.282 
[26]:   test's l2:140.168 
[27]:   test's l2:140.115 
[28]:   test's l2:139.97 
[29]:   test's l2:139.89 
[30]:   test's l2:139.696 
[31]:   test's l2:139.54 
[32]:   test's l2:139.471 
[33]:   test's l2:139.476 
[34]:   test's l2:139.369 
[35]:   test's l2:139.341 
[36]:   test's l2:139.222 
[37]:   test's l2:139.145 
[38]:   test's l2:139.162 
[39]:   test's l2:139.133 
[40]:   test's l2:139.05 
[41]:   test's l2:139.069 
[42]:   test's l2:138.936 
[43]:   test's l2:138.828 
[44]:   test's l2:138.833 
[45]:   test's l2:138.79 
[46]:   test's l2:138.739 
[47]:   test's l2:138.702 
[48]:   test's l2:138.57 
[49]:   test's l2:138.53 
[50]:   test's l2:138.504 
[51]:   test's l2:138.481 
[52]:   test's l2:138.376 
[53]:   test's l2:138.354 
[54]:   test's l2:138.345 
[55]:   test's l2:138.316 
[56]:   test's l2:138.315 
[57]:   test's l2:138.245 
[58]:   test's l2:138.236 
[59]:   test's l2:138.152 
[60]:   test's l2:138.087 
[61]:   test's l2:137.99 
[62]:   test's l2:137.979 
[63]:   test's l2:137.909 
[64]:   test's l2:137.816 
[65]:   test's l2:137.755 
[66]:   test's l2:137.674 
[67]:   test's l2:137.644 
[68]:   test's l2:137.605 
[69]:   test's l2:137.607 
[70]:   test's l2:137.568 
[71]:   test's l2:137.476 
[72]:   test's l2:137.451 
[73]:   test's l2:137.431 
[74]:   test's l2:137.383 
[75]:   test's l2:137.414 
[76]:   test's l2:137.413 
[77]:   test's l2:137.375 
[78]:   test's l2:137.394 
[79]:   test's l2:137.3 
[80]:   test's l2:137.308 
[81]:   test's l2:137.331 
[82]:   test's l2:137.328 
[83]:   test's l2:137.294 
[84]:   test's l2:137.334 
[85]:   test's l2:137.258 
[86]:   test's l2:137.191 
[87]:   test's l2:137.181 
[88]:   test's l2:137.115 
[89]:   test's l2:137.134 
[90]:   test's l2:137.128 
[91]:   test's l2:137.123 
[92]:   test's l2:137.134 
[93]:   test's l2:137.078 
[94]:   test's l2:137.024 
[95]:   test's l2:136.953 
[96]:   test's l2:136.96 
[97]:   test's l2:136.97 
[98]:   test's l2:136.933 
[99]:   test's l2:136.95 
[100]:  test's l2:136.928 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.076444 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.084 
[3]:    test's l2:145.97 
[4]:    test's l2:145.398 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.654 
[8]:    test's l2:143.333 
[9]:    test's l2:143.306 
[10]:   test's l2:143.03 
[11]:   test's l2:142.687 
[12]:   test's l2:142.381 
[13]:   test's l2:142.11 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.416 
[18]:   test's l2:141.312 
[19]:   test's l2:141.195 
[20]:   test's l2:140.955 
[21]:   test's l2:140.857 
[22]:   test's l2:140.73 
[23]:   test's l2:140.551 
[24]:   test's l2:140.4 
[25]:   test's l2:140.283 
[26]:   test's l2:140.168 
[27]:   test's l2:140.115 
[28]:   test's l2:139.97 
[29]:   test's l2:139.89 
[30]:   test's l2:139.696 
[31]:   test's l2:139.541 
[32]:   test's l2:139.471 
[33]:   test's l2:139.477 
[34]:   test's l2:139.369 
[35]:   test's l2:139.341 
[36]:   test's l2:139.222 
[37]:   test's l2:139.146 
[38]:   test's l2:139.162 
[39]:   test's l2:139.133 
[40]:   test's l2:139.05 
[41]:   test's l2:139.069 
[42]:   test's l2:138.936 
[43]:   test's l2:138.828 
[44]:   test's l2:138.833 
[45]:   test's l2:138.79 
[46]:   test's l2:138.739 
[47]:   test's l2:138.702 
[48]:   test's l2:138.571 
[49]:   test's l2:138.53 
[50]:   test's l2:138.504 
[51]:   test's l2:138.481 
[52]:   test's l2:138.376 
[53]:   test's l2:138.354 
[54]:   test's l2:138.345 
[55]:   test's l2:138.316 
[56]:   test's l2:138.315 
[57]:   test's l2:138.245 
[58]:   test's l2:138.236 
[59]:   test's l2:138.151 
[60]:   test's l2:138.087 
[61]:   test's l2:137.99 
[62]:   test's l2:137.979 
[63]:   test's l2:137.909 
[64]:   test's l2:137.816 
[65]:   test's l2:137.755 
[66]:   test's l2:137.674 
[67]:   test's l2:137.644 
[68]:   test's l2:137.605 
[69]:   test's l2:137.607 
[70]:   test's l2:137.568 
[71]:   test's l2:137.476 
[72]:   test's l2:137.451 
[73]:   test's l2:137.431 
[74]:   test's l2:137.383 
[75]:   test's l2:137.414 
[76]:   test's l2:137.413 
[77]:   test's l2:137.375 
[78]:   test's l2:137.394 
[79]:   test's l2:137.3 
[80]:   test's l2:137.307 
[81]:   test's l2:137.331 
[82]:   test's l2:137.328 
[83]:   test's l2:137.294 
[84]:   test's l2:137.333 
[85]:   test's l2:137.258 
[86]:   test's l2:137.191 
[87]:   test's l2:137.181 
[88]:   test's l2:137.115 
[89]:   test's l2:137.134 
[90]:   test's l2:137.128 
[91]:   test's l2:137.122 
[92]:   test's l2:137.133 
[93]:   test's l2:137.077 
[94]:   test's l2:137.024 
[95]:   test's l2:136.953 
[96]:   test's l2:136.96 
[97]:   test's l2:136.969 
[98]:   test's l2:136.933 
[99]:   test's l2:136.949 
[100]:  test's l2:136.927 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.056702 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.301 
[2]:    test's l2:147.084 
[3]:    test's l2:145.97 
[4]:    test's l2:145.398 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.654 
[8]:    test's l2:143.333 
[9]:    test's l2:143.306 
[10]:   test's l2:143.03 
[11]:   test's l2:142.687 
[12]:   test's l2:142.381 
[13]:   test's l2:142.11 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.416 
[18]:   test's l2:141.312 
[19]:   test's l2:141.195 
[20]:   test's l2:140.955 
[21]:   test's l2:140.857 
[22]:   test's l2:140.73 
[23]:   test's l2:140.551 
[24]:   test's l2:140.4 
[25]:   test's l2:140.283 
[26]:   test's l2:140.168 
[27]:   test's l2:140.115 
[28]:   test's l2:139.97 
[29]:   test's l2:139.89 
[30]:   test's l2:139.696 
[31]:   test's l2:139.541 
[32]:   test's l2:139.471 
[33]:   test's l2:139.477 
[34]:   test's l2:139.369 
[35]:   test's l2:139.341 
[36]:   test's l2:139.222 
[37]:   test's l2:139.146 
[38]:   test's l2:139.162 
[39]:   test's l2:139.133 
[40]:   test's l2:139.05 
[41]:   test's l2:139.069 
[42]:   test's l2:138.936 
[43]:   test's l2:138.828 
[44]:   test's l2:138.833 
[45]:   test's l2:138.79 
[46]:   test's l2:138.739 
[47]:   test's l2:138.702 
[48]:   test's l2:138.571 
[49]:   test's l2:138.53 
[50]:   test's l2:138.504 
[51]:   test's l2:138.481 
[52]:   test's l2:138.376 
[53]:   test's l2:138.354 
[54]:   test's l2:138.345 
[55]:   test's l2:138.316 
[56]:   test's l2:138.315 
[57]:   test's l2:138.245 
[58]:   test's l2:138.236 
[59]:   test's l2:138.151 
[60]:   test's l2:138.087 
[61]:   test's l2:137.99 
[62]:   test's l2:137.979 
[63]:   test's l2:137.909 
[64]:   test's l2:137.816 
[65]:   test's l2:137.755 
[66]:   test's l2:137.674 
[67]:   test's l2:137.644 
[68]:   test's l2:137.605 
[69]:   test's l2:137.607 
[70]:   test's l2:137.568 
[71]:   test's l2:137.476 
[72]:   test's l2:137.451 
[73]:   test's l2:137.431 
[74]:   test's l2:137.383 
[75]:   test's l2:137.414 
[76]:   test's l2:137.413 
[77]:   test's l2:137.375 
[78]:   test's l2:137.394 
[79]:   test's l2:137.3 
[80]:   test's l2:137.307 
[81]:   test's l2:137.331 
[82]:   test's l2:137.328 
[83]:   test's l2:137.294 
[84]:   test's l2:137.333 
[85]:   test's l2:137.258 
[86]:   test's l2:137.191 
[87]:   test's l2:137.181 
[88]:   test's l2:137.115 
[89]:   test's l2:137.134 
[90]:   test's l2:137.128 
[91]:   test's l2:137.122 
[92]:   test's l2:137.133 
[93]:   test's l2:137.077 
[94]:   test's l2:137.024 
[95]:   test's l2:136.953 
[96]:   test's l2:136.96 
[97]:   test's l2:136.969 
[98]:   test's l2:136.933 
[99]:   test's l2:136.949 
[100]:  test's l2:136.927 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing col-wise multi-threading, the overhead of testing was 0.044611 seconds.
You can set `force_col_wise=true` to remove the overhead.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.3 
[2]:    test's l2:147.083 
[3]:    test's l2:145.97 
[4]:    test's l2:145.398 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.654 
[8]:    test's l2:143.332 
[9]:    test's l2:143.306 
[10]:   test's l2:143.03 
[11]:   test's l2:142.687 
[12]:   test's l2:142.381 
[13]:   test's l2:142.111 
[14]:   test's l2:141.948 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.42 
[18]:   test's l2:141.315 
[19]:   test's l2:141.198 
[20]:   test's l2:140.958 
[21]:   test's l2:140.861 
[22]:   test's l2:140.733 
[23]:   test's l2:140.554 
[24]:   test's l2:140.403 
[25]:   test's l2:140.286 
[26]:   test's l2:140.172 
[27]:   test's l2:140.118 
[28]:   test's l2:139.973 
[29]:   test's l2:139.893 
[30]:   test's l2:139.7 
[31]:   test's l2:139.545 
[32]:   test's l2:139.475 
[33]:   test's l2:139.481 
[34]:   test's l2:139.373 
[35]:   test's l2:139.345 
[36]:   test's l2:139.225 
[37]:   test's l2:139.149 
[38]:   test's l2:139.166 
[39]:   test's l2:139.136 
[40]:   test's l2:139.053 
[41]:   test's l2:139.071 
[42]:   test's l2:138.939 
[43]:   test's l2:138.83 
[44]:   test's l2:138.786 
[45]:   test's l2:138.735 
[46]:   test's l2:138.696 
[47]:   test's l2:138.702 
[48]:   test's l2:138.571 
[49]:   test's l2:138.53 
[50]:   test's l2:138.504 
[51]:   test's l2:138.481 
[52]:   test's l2:138.376 
[53]:   test's l2:138.354 
[54]:   test's l2:138.344 
[55]:   test's l2:138.315 
[56]:   test's l2:138.314 
[57]:   test's l2:138.244 
[58]:   test's l2:138.235 
[59]:   test's l2:138.151 
[60]:   test's l2:138.086 
[61]:   test's l2:137.99 
[62]:   test's l2:137.979 
[63]:   test's l2:137.909 
[64]:   test's l2:137.817 
[65]:   test's l2:137.755 
[66]:   test's l2:137.674 
[67]:   test's l2:137.644 
[68]:   test's l2:137.605 
[69]:   test's l2:137.607 
[70]:   test's l2:137.568 
[71]:   test's l2:137.477 
[72]:   test's l2:137.452 
[73]:   test's l2:137.432 
[74]:   test's l2:137.384 
[75]:   test's l2:137.415 
[76]:   test's l2:137.376 
[77]:   test's l2:137.375 
[78]:   test's l2:137.395 
[79]:   test's l2:137.301 
[80]:   test's l2:137.308 
[81]:   test's l2:137.332 
[82]:   test's l2:137.328 
[83]:   test's l2:137.295 
[84]:   test's l2:137.22 
[85]:   test's l2:137.258 
[86]:   test's l2:137.191 
[87]:   test's l2:137.181 
[88]:   test's l2:137.115 
[89]:   test's l2:137.134 
[90]:   test's l2:137.128 
[91]:   test's l2:137.123 
[92]:   test's l2:137.134 
[93]:   test's l2:137.077 
[94]:   test's l2:137.024 
[95]:   test's l2:136.953 
[96]:   test's l2:136.96 
[97]:   test's l2:136.969 
[98]:   test's l2:136.932 
[99]:   test's l2:136.948 
[100]:  test's l2:136.969 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.051120 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.3 
[2]:    test's l2:147.083 
[3]:    test's l2:145.97 
[4]:    test's l2:145.398 
[5]:    test's l2:144.802 
[6]:    test's l2:144.095 
[7]:    test's l2:143.654 
[8]:    test's l2:143.332 
[9]:    test's l2:143.306 
[10]:   test's l2:143.03 
[11]:   test's l2:142.687 
[12]:   test's l2:142.381 
[13]:   test's l2:142.111 
[14]:   test's l2:141.948 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.42 
[18]:   test's l2:141.315 
[19]:   test's l2:141.198 
[20]:   test's l2:140.958 
[21]:   test's l2:140.861 
[22]:   test's l2:140.733 
[23]:   test's l2:140.554 
[24]:   test's l2:140.403 
[25]:   test's l2:140.286 
[26]:   test's l2:140.172 
[27]:   test's l2:140.118 
[28]:   test's l2:139.973 
[29]:   test's l2:139.893 
[30]:   test's l2:139.7 
[31]:   test's l2:139.545 
[32]:   test's l2:139.475 
[33]:   test's l2:139.481 
[34]:   test's l2:139.373 
[35]:   test's l2:139.345 
[36]:   test's l2:139.225 
[37]:   test's l2:139.149 
[38]:   test's l2:139.166 
[39]:   test's l2:139.136 
[40]:   test's l2:139.053 
[41]:   test's l2:139.071 
[42]:   test's l2:138.939 
[43]:   test's l2:138.83 
[44]:   test's l2:138.786 
[45]:   test's l2:138.735 
[46]:   test's l2:138.696 
[47]:   test's l2:138.702 
[48]:   test's l2:138.571 
[49]:   test's l2:138.53 
[50]:   test's l2:138.504 
[51]:   test's l2:138.481 
[52]:   test's l2:138.376 
[53]:   test's l2:138.354 
[54]:   test's l2:138.344 
[55]:   test's l2:138.315 
[56]:   test's l2:138.314 
[57]:   test's l2:138.244 
[58]:   test's l2:138.235 
[59]:   test's l2:138.151 
[60]:   test's l2:138.086 
[61]:   test's l2:137.99 
[62]:   test's l2:137.979 
[63]:   test's l2:137.909 
[64]:   test's l2:137.817 
[65]:   test's l2:137.755 
[66]:   test's l2:137.674 
[67]:   test's l2:137.644 
[68]:   test's l2:137.605 
[69]:   test's l2:137.607 
[70]:   test's l2:137.568 
[71]:   test's l2:137.477 
[72]:   test's l2:137.452 
[73]:   test's l2:137.432 
[74]:   test's l2:137.384 
[75]:   test's l2:137.415 
[76]:   test's l2:137.376 
[77]:   test's l2:137.375 
[78]:   test's l2:137.395 
[79]:   test's l2:137.301 
[80]:   test's l2:137.308 
[81]:   test's l2:137.332 
[82]:   test's l2:137.328 
[83]:   test's l2:137.295 
[84]:   test's l2:137.22 
[85]:   test's l2:137.258 
[86]:   test's l2:137.191 
[87]:   test's l2:137.181 
[88]:   test's l2:137.115 
[89]:   test's l2:137.134 
[90]:   test's l2:137.128 
[91]:   test's l2:137.123 
[92]:   test's l2:137.134 
[93]:   test's l2:137.077 
[94]:   test's l2:137.024 
[95]:   test's l2:136.953 
[96]:   test's l2:136.96 
[97]:   test's l2:136.969 
[98]:   test's l2:136.932 
[99]:   test's l2:136.948 
[100]:  test's l2:136.969 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.056511 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.3 
[2]:    test's l2:147.083 
[3]:    test's l2:145.97 
[4]:    test's l2:145.398 
[5]:    test's l2:144.801 
[6]:    test's l2:144.095 
[7]:    test's l2:143.654 
[8]:    test's l2:143.332 
[9]:    test's l2:143.306 
[10]:   test's l2:143.03 
[11]:   test's l2:142.687 
[12]:   test's l2:142.381 
[13]:   test's l2:142.111 
[14]:   test's l2:141.948 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.42 
[18]:   test's l2:141.315 
[19]:   test's l2:141.199 
[20]:   test's l2:140.958 
[21]:   test's l2:140.861 
[22]:   test's l2:140.733 
[23]:   test's l2:140.554 
[24]:   test's l2:140.403 
[25]:   test's l2:140.286 
[26]:   test's l2:140.172 
[27]:   test's l2:140.118 
[28]:   test's l2:139.973 
[29]:   test's l2:139.893 
[30]:   test's l2:139.701 
[31]:   test's l2:139.546 
[32]:   test's l2:139.476 
[33]:   test's l2:139.481 
[34]:   test's l2:139.374 
[35]:   test's l2:139.345 
[36]:   test's l2:139.226 
[37]:   test's l2:139.15 
[38]:   test's l2:139.166 
[39]:   test's l2:139.137 
[40]:   test's l2:139.054 
[41]:   test's l2:139.071 
[42]:   test's l2:138.939 
[43]:   test's l2:138.83 
[44]:   test's l2:138.786 
[45]:   test's l2:138.735 
[46]:   test's l2:138.696 
[47]:   test's l2:138.703 
[48]:   test's l2:138.571 
[49]:   test's l2:138.53 
[50]:   test's l2:138.504 
[51]:   test's l2:138.481 
[52]:   test's l2:138.376 
[53]:   test's l2:138.354 
[54]:   test's l2:138.344 
[55]:   test's l2:138.315 
[56]:   test's l2:138.314 
[57]:   test's l2:138.244 
[58]:   test's l2:138.235 
[59]:   test's l2:138.15 
[60]:   test's l2:138.086 
[61]:   test's l2:137.989 
[62]:   test's l2:137.979 
[63]:   test's l2:137.909 
[64]:   test's l2:137.817 
[65]:   test's l2:137.755 
[66]:   test's l2:137.674 
[67]:   test's l2:137.644 
[68]:   test's l2:137.605 
[69]:   test's l2:137.607 
[70]:   test's l2:137.568 
[71]:   test's l2:137.477 
[72]:   test's l2:137.452 
[73]:   test's l2:137.432 
[74]:   test's l2:137.384 
[75]:   test's l2:137.415 
[76]:   test's l2:137.376 
[77]:   test's l2:137.375 
[78]:   test's l2:137.395 
[79]:   test's l2:137.301 
[80]:   test's l2:137.308 
[81]:   test's l2:137.332 
[82]:   test's l2:137.328 
[83]:   test's l2:137.295 
[84]:   test's l2:137.22 
[85]:   test's l2:137.258 
[86]:   test's l2:137.191 
[87]:   test's l2:137.181 
[88]:   test's l2:137.115 
[89]:   test's l2:137.134 
[90]:   test's l2:137.128 
[91]:   test's l2:137.122 
[92]:   test's l2:137.133 
[93]:   test's l2:137.077 
[94]:   test's l2:137.024 
[95]:   test's l2:136.952 
[96]:   test's l2:136.959 
[97]:   test's l2:136.923 
[98]:   test's l2:136.932 
[99]:   test's l2:136.947 
[100]:  test's l2:136.921 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.051197 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.3 
[2]:    test's l2:147.083 
[3]:    test's l2:145.97 
[4]:    test's l2:145.398 
[5]:    test's l2:144.801 
[6]:    test's l2:144.095 
[7]:    test's l2:143.654 
[8]:    test's l2:143.332 
[9]:    test's l2:143.306 
[10]:   test's l2:143.03 
[11]:   test's l2:142.687 
[12]:   test's l2:142.381 
[13]:   test's l2:142.111 
[14]:   test's l2:141.948 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.42 
[18]:   test's l2:141.315 
[19]:   test's l2:141.199 
[20]:   test's l2:140.958 
[21]:   test's l2:140.861 
[22]:   test's l2:140.733 
[23]:   test's l2:140.554 
[24]:   test's l2:140.403 
[25]:   test's l2:140.286 
[26]:   test's l2:140.172 
[27]:   test's l2:140.118 
[28]:   test's l2:139.973 
[29]:   test's l2:139.893 
[30]:   test's l2:139.701 
[31]:   test's l2:139.546 
[32]:   test's l2:139.476 
[33]:   test's l2:139.481 
[34]:   test's l2:139.374 
[35]:   test's l2:139.345 
[36]:   test's l2:139.226 
[37]:   test's l2:139.15 
[38]:   test's l2:139.166 
[39]:   test's l2:139.137 
[40]:   test's l2:139.054 
[41]:   test's l2:139.071 
[42]:   test's l2:138.939 
[43]:   test's l2:138.83 
[44]:   test's l2:138.786 
[45]:   test's l2:138.735 
[46]:   test's l2:138.696 
[47]:   test's l2:138.703 
[48]:   test's l2:138.571 
[49]:   test's l2:138.53 
[50]:   test's l2:138.504 
[51]:   test's l2:138.481 
[52]:   test's l2:138.376 
[53]:   test's l2:138.354 
[54]:   test's l2:138.344 
[55]:   test's l2:138.315 
[56]:   test's l2:138.314 
[57]:   test's l2:138.244 
[58]:   test's l2:138.235 
[59]:   test's l2:138.15 
[60]:   test's l2:138.086 
[61]:   test's l2:137.989 
[62]:   test's l2:137.979 
[63]:   test's l2:137.909 
[64]:   test's l2:137.817 
[65]:   test's l2:137.755 
[66]:   test's l2:137.674 
[67]:   test's l2:137.644 
[68]:   test's l2:137.605 
[69]:   test's l2:137.607 
[70]:   test's l2:137.568 
[71]:   test's l2:137.477 
[72]:   test's l2:137.452 
[73]:   test's l2:137.432 
[74]:   test's l2:137.384 
[75]:   test's l2:137.415 
[76]:   test's l2:137.376 
[77]:   test's l2:137.375 
[78]:   test's l2:137.395 
[79]:   test's l2:137.301 
[80]:   test's l2:137.308 
[81]:   test's l2:137.332 
[82]:   test's l2:137.328 
[83]:   test's l2:137.295 
[84]:   test's l2:137.22 
[85]:   test's l2:137.258 
[86]:   test's l2:137.191 
[87]:   test's l2:137.181 
[88]:   test's l2:137.115 
[89]:   test's l2:137.134 
[90]:   test's l2:137.128 
[91]:   test's l2:137.122 
[92]:   test's l2:137.133 
[93]:   test's l2:137.077 
[94]:   test's l2:137.024 
[95]:   test's l2:136.952 
[96]:   test's l2:136.959 
[97]:   test's l2:136.923 
[98]:   test's l2:136.932 
[99]:   test's l2:136.947 
[100]:  test's l2:136.921 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing col-wise multi-threading, the overhead of testing was 0.060263 seconds.
You can set `force_col_wise=true` to remove the overhead.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.3 
[2]:    test's l2:147.083 
[3]:    test's l2:145.97 
[4]:    test's l2:145.398 
[5]:    test's l2:144.801 
[6]:    test's l2:144.095 
[7]:    test's l2:143.654 
[8]:    test's l2:143.332 
[9]:    test's l2:143.306 
[10]:   test's l2:143.029 
[11]:   test's l2:142.686 
[12]:   test's l2:142.381 
[13]:   test's l2:142.111 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.42 
[18]:   test's l2:141.315 
[19]:   test's l2:141.199 
[20]:   test's l2:140.958 
[21]:   test's l2:140.861 
[22]:   test's l2:140.733 
[23]:   test's l2:140.554 
[24]:   test's l2:140.403 
[25]:   test's l2:140.286 
[26]:   test's l2:140.172 
[27]:   test's l2:140.118 
[28]:   test's l2:139.973 
[29]:   test's l2:139.893 
[30]:   test's l2:139.701 
[31]:   test's l2:139.546 
[32]:   test's l2:139.476 
[33]:   test's l2:139.482 
[34]:   test's l2:139.374 
[35]:   test's l2:139.345 
[36]:   test's l2:139.226 
[37]:   test's l2:139.15 
[38]:   test's l2:139.167 
[39]:   test's l2:139.137 
[40]:   test's l2:139.054 
[41]:   test's l2:138.922 
[42]:   test's l2:138.94 
[43]:   test's l2:138.83 
[44]:   test's l2:138.786 
[45]:   test's l2:138.735 
[46]:   test's l2:138.697 
[47]:   test's l2:138.703 
[48]:   test's l2:138.572 
[49]:   test's l2:138.531 
[50]:   test's l2:138.505 
[51]:   test's l2:138.481 
[52]:   test's l2:138.376 
[53]:   test's l2:138.354 
[54]:   test's l2:138.344 
[55]:   test's l2:138.315 
[56]:   test's l2:138.314 
[57]:   test's l2:138.244 
[58]:   test's l2:138.235 
[59]:   test's l2:138.15 
[60]:   test's l2:138.086 
[61]:   test's l2:137.989 
[62]:   test's l2:137.978 
[63]:   test's l2:137.909 
[64]:   test's l2:137.817 
[65]:   test's l2:137.755 
[66]:   test's l2:137.674 
[67]:   test's l2:137.644 
[68]:   test's l2:137.605 
[69]:   test's l2:137.579 
[70]:   test's l2:137.559 
[71]:   test's l2:137.561 
[72]:   test's l2:137.523 
[73]:   test's l2:137.432 
[74]:   test's l2:137.384 
[75]:   test's l2:137.415 
[76]:   test's l2:137.376 
[77]:   test's l2:137.375 
[78]:   test's l2:137.394 
[79]:   test's l2:137.301 
[80]:   test's l2:137.308 
[81]:   test's l2:137.331 
[82]:   test's l2:137.328 
[83]:   test's l2:137.294 
[84]:   test's l2:137.219 
[85]:   test's l2:137.152 
[86]:   test's l2:137.142 
[87]:   test's l2:137.076 
[88]:   test's l2:137.095 
[89]:   test's l2:137.089 
[90]:   test's l2:137.113 
[91]:   test's l2:137.108 
[92]:   test's l2:137.039 
[93]:   test's l2:136.986 
[94]:   test's l2:136.948 
[95]:   test's l2:136.955 
[96]:   test's l2:136.964 
[97]:   test's l2:136.975 
[98]:   test's l2:136.914 
[99]:   test's l2:136.93 
[100]:  test's l2:136.903 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.057181 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.3 
[2]:    test's l2:147.083 
[3]:    test's l2:145.97 
[4]:    test's l2:145.398 
[5]:    test's l2:144.801 
[6]:    test's l2:144.095 
[7]:    test's l2:143.654 
[8]:    test's l2:143.332 
[9]:    test's l2:143.306 
[10]:   test's l2:143.029 
[11]:   test's l2:142.686 
[12]:   test's l2:142.381 
[13]:   test's l2:142.111 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.42 
[18]:   test's l2:141.315 
[19]:   test's l2:141.199 
[20]:   test's l2:140.958 
[21]:   test's l2:140.861 
[22]:   test's l2:140.733 
[23]:   test's l2:140.554 
[24]:   test's l2:140.403 
[25]:   test's l2:140.286 
[26]:   test's l2:140.172 
[27]:   test's l2:140.118 
[28]:   test's l2:139.973 
[29]:   test's l2:139.893 
[30]:   test's l2:139.701 
[31]:   test's l2:139.546 
[32]:   test's l2:139.476 
[33]:   test's l2:139.482 
[34]:   test's l2:139.374 
[35]:   test's l2:139.345 
[36]:   test's l2:139.226 
[37]:   test's l2:139.15 
[38]:   test's l2:139.167 
[39]:   test's l2:139.137 
[40]:   test's l2:139.054 
[41]:   test's l2:138.922 
[42]:   test's l2:138.94 
[43]:   test's l2:138.83 
[44]:   test's l2:138.786 
[45]:   test's l2:138.735 
[46]:   test's l2:138.697 
[47]:   test's l2:138.703 
[48]:   test's l2:138.572 
[49]:   test's l2:138.531 
[50]:   test's l2:138.505 
[51]:   test's l2:138.481 
[52]:   test's l2:138.376 
[53]:   test's l2:138.354 
[54]:   test's l2:138.344 
[55]:   test's l2:138.315 
[56]:   test's l2:138.314 
[57]:   test's l2:138.244 
[58]:   test's l2:138.235 
[59]:   test's l2:138.15 
[60]:   test's l2:138.086 
[61]:   test's l2:137.989 
[62]:   test's l2:137.978 
[63]:   test's l2:137.909 
[64]:   test's l2:137.817 
[65]:   test's l2:137.755 
[66]:   test's l2:137.674 
[67]:   test's l2:137.644 
[68]:   test's l2:137.605 
[69]:   test's l2:137.579 
[70]:   test's l2:137.559 
[71]:   test's l2:137.561 
[72]:   test's l2:137.523 
[73]:   test's l2:137.432 
[74]:   test's l2:137.384 
[75]:   test's l2:137.415 
[76]:   test's l2:137.376 
[77]:   test's l2:137.375 
[78]:   test's l2:137.394 
[79]:   test's l2:137.301 
[80]:   test's l2:137.308 
[81]:   test's l2:137.331 
[82]:   test's l2:137.328 
[83]:   test's l2:137.294 
[84]:   test's l2:137.219 
[85]:   test's l2:137.152 
[86]:   test's l2:137.142 
[87]:   test's l2:137.076 
[88]:   test's l2:137.095 
[89]:   test's l2:137.089 
[90]:   test's l2:137.113 
[91]:   test's l2:137.108 
[92]:   test's l2:137.039 
[93]:   test's l2:136.986 
[94]:   test's l2:136.948 
[95]:   test's l2:136.955 
[96]:   test's l2:136.964 
[97]:   test's l2:136.975 
[98]:   test's l2:136.914 
[99]:   test's l2:136.93 
[100]:  test's l2:136.903 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.059271 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.3 
[2]:    test's l2:147.083 
[3]:    test's l2:145.97 
[4]:    test's l2:145.398 
[5]:    test's l2:144.801 
[6]:    test's l2:144.095 
[7]:    test's l2:143.654 
[8]:    test's l2:143.332 
[9]:    test's l2:143.306 
[10]:   test's l2:143.029 
[11]:   test's l2:142.686 
[12]:   test's l2:142.381 
[13]:   test's l2:142.111 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.42 
[18]:   test's l2:141.315 
[19]:   test's l2:141.199 
[20]:   test's l2:140.958 
[21]:   test's l2:140.861 
[22]:   test's l2:140.733 
[23]:   test's l2:140.554 
[24]:   test's l2:140.404 
[25]:   test's l2:140.286 
[26]:   test's l2:140.172 
[27]:   test's l2:140.119 
[28]:   test's l2:139.973 
[29]:   test's l2:139.894 
[30]:   test's l2:139.702 
[31]:   test's l2:139.547 
[32]:   test's l2:139.477 
[33]:   test's l2:139.482 
[34]:   test's l2:139.374 
[35]:   test's l2:139.346 
[36]:   test's l2:139.226 
[37]:   test's l2:139.151 
[38]:   test's l2:139.167 
[39]:   test's l2:139.137 
[40]:   test's l2:139.054 
[41]:   test's l2:138.923 
[42]:   test's l2:138.814 
[43]:   test's l2:138.831 
[44]:   test's l2:138.787 
[45]:   test's l2:138.736 
[46]:   test's l2:138.697 
[47]:   test's l2:138.703 
[48]:   test's l2:138.572 
[49]:   test's l2:138.531 
[50]:   test's l2:138.505 
[51]:   test's l2:138.481 
[52]:   test's l2:138.376 
[53]:   test's l2:138.354 
[54]:   test's l2:138.344 
[55]:   test's l2:138.315 
[56]:   test's l2:138.314 
[57]:   test's l2:138.244 
[58]:   test's l2:138.235 
[59]:   test's l2:138.15 
[60]:   test's l2:138.086 
[61]:   test's l2:137.989 
[62]:   test's l2:137.978 
[63]:   test's l2:137.909 
[64]:   test's l2:137.817 
[65]:   test's l2:137.755 
[66]:   test's l2:137.675 
[67]:   test's l2:137.645 
[68]:   test's l2:137.606 
[69]:   test's l2:137.579 
[70]:   test's l2:137.559 
[71]:   test's l2:137.561 
[72]:   test's l2:137.523 
[73]:   test's l2:137.432 
[74]:   test's l2:137.384 
[75]:   test's l2:137.415 
[76]:   test's l2:137.376 
[77]:   test's l2:137.375 
[78]:   test's l2:137.394 
[79]:   test's l2:137.301 
[80]:   test's l2:137.308 
[81]:   test's l2:137.331 
[82]:   test's l2:137.328 
[83]:   test's l2:137.294 
[84]:   test's l2:137.219 
[85]:   test's l2:137.152 
[86]:   test's l2:137.142 
[87]:   test's l2:137.076 
[88]:   test's l2:137.095 
[89]:   test's l2:137.089 
[90]:   test's l2:137.113 
[91]:   test's l2:137.107 
[92]:   test's l2:137.039 
[93]:   test's l2:136.986 
[94]:   test's l2:136.948 
[95]:   test's l2:136.955 
[96]:   test's l2:136.964 
[97]:   test's l2:136.975 
[98]:   test's l2:136.914 
[99]:   test's l2:136.929 
[100]:  test's l2:136.903 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.035984 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.3 
[2]:    test's l2:147.083 
[3]:    test's l2:145.97 
[4]:    test's l2:145.398 
[5]:    test's l2:144.801 
[6]:    test's l2:144.095 
[7]:    test's l2:143.654 
[8]:    test's l2:143.332 
[9]:    test's l2:143.306 
[10]:   test's l2:143.029 
[11]:   test's l2:142.686 
[12]:   test's l2:142.381 
[13]:   test's l2:142.111 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.42 
[18]:   test's l2:141.315 
[19]:   test's l2:141.199 
[20]:   test's l2:140.958 
[21]:   test's l2:140.861 
[22]:   test's l2:140.733 
[23]:   test's l2:140.554 
[24]:   test's l2:140.404 
[25]:   test's l2:140.286 
[26]:   test's l2:140.172 
[27]:   test's l2:140.119 
[28]:   test's l2:139.973 
[29]:   test's l2:139.894 
[30]:   test's l2:139.702 
[31]:   test's l2:139.547 
[32]:   test's l2:139.477 
[33]:   test's l2:139.482 
[34]:   test's l2:139.374 
[35]:   test's l2:139.346 
[36]:   test's l2:139.226 
[37]:   test's l2:139.151 
[38]:   test's l2:139.167 
[39]:   test's l2:139.137 
[40]:   test's l2:139.054 
[41]:   test's l2:138.923 
[42]:   test's l2:138.814 
[43]:   test's l2:138.831 
[44]:   test's l2:138.787 
[45]:   test's l2:138.736 
[46]:   test's l2:138.697 
[47]:   test's l2:138.703 
[48]:   test's l2:138.572 
[49]:   test's l2:138.531 
[50]:   test's l2:138.505 
[51]:   test's l2:138.481 
[52]:   test's l2:138.376 
[53]:   test's l2:138.354 
[54]:   test's l2:138.344 
[55]:   test's l2:138.315 
[56]:   test's l2:138.314 
[57]:   test's l2:138.244 
[58]:   test's l2:138.235 
[59]:   test's l2:138.15 
[60]:   test's l2:138.086 
[61]:   test's l2:137.989 
[62]:   test's l2:137.978 
[63]:   test's l2:137.909 
[64]:   test's l2:137.817 
[65]:   test's l2:137.755 
[66]:   test's l2:137.675 
[67]:   test's l2:137.645 
[68]:   test's l2:137.606 
[69]:   test's l2:137.579 
[70]:   test's l2:137.559 
[71]:   test's l2:137.561 
[72]:   test's l2:137.523 
[73]:   test's l2:137.432 
[74]:   test's l2:137.384 
[75]:   test's l2:137.415 
[76]:   test's l2:137.376 
[77]:   test's l2:137.375 
[78]:   test's l2:137.394 
[79]:   test's l2:137.301 
[80]:   test's l2:137.308 
[81]:   test's l2:137.331 
[82]:   test's l2:137.328 
[83]:   test's l2:137.294 
[84]:   test's l2:137.219 
[85]:   test's l2:137.152 
[86]:   test's l2:137.142 
[87]:   test's l2:137.076 
[88]:   test's l2:137.095 
[89]:   test's l2:137.089 
[90]:   test's l2:137.113 
[91]:   test's l2:137.107 
[92]:   test's l2:137.039 
[93]:   test's l2:136.986 
[94]:   test's l2:136.948 
[95]:   test's l2:136.955 
[96]:   test's l2:136.964 
[97]:   test's l2:136.975 
[98]:   test's l2:136.914 
[99]:   test's l2:136.929 
[100]:  test's l2:136.903 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.062640 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.3 
[2]:    test's l2:147.083 
[3]:    test's l2:145.97 
[4]:    test's l2:145.398 
[5]:    test's l2:144.801 
[6]:    test's l2:144.094 
[7]:    test's l2:143.654 
[8]:    test's l2:143.332 
[9]:    test's l2:143.305 
[10]:   test's l2:143.029 
[11]:   test's l2:142.686 
[12]:   test's l2:142.381 
[13]:   test's l2:142.111 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.42 
[18]:   test's l2:141.316 
[19]:   test's l2:141.199 
[20]:   test's l2:140.959 
[21]:   test's l2:140.861 
[22]:   test's l2:140.733 
[23]:   test's l2:140.554 
[24]:   test's l2:140.404 
[25]:   test's l2:140.287 
[26]:   test's l2:140.172 
[27]:   test's l2:140.119 
[28]:   test's l2:139.974 
[29]:   test's l2:139.894 
[30]:   test's l2:139.702 
[31]:   test's l2:139.547 
[32]:   test's l2:139.478 
[33]:   test's l2:139.483 
[34]:   test's l2:139.375 
[35]:   test's l2:139.346 
[36]:   test's l2:139.227 
[37]:   test's l2:139.151 
[38]:   test's l2:139.167 
[39]:   test's l2:139.137 
[40]:   test's l2:139.055 
[41]:   test's l2:138.923 
[42]:   test's l2:138.814 
[43]:   test's l2:138.831 
[44]:   test's l2:138.836 
[45]:   test's l2:138.793 
[46]:   test's l2:138.742 
[47]:   test's l2:138.705 
[48]:   test's l2:138.574 
[49]:   test's l2:138.533 
[50]:   test's l2:138.507 
[51]:   test's l2:138.483 
[52]:   test's l2:138.377 
[53]:   test's l2:138.355 
[54]:   test's l2:138.346 
[55]:   test's l2:138.316 
[56]:   test's l2:138.315 
[57]:   test's l2:138.263 
[58]:   test's l2:138.254 
[59]:   test's l2:138.169 
[60]:   test's l2:138.105 
[61]:   test's l2:138.007 
[62]:   test's l2:137.996 
[63]:   test's l2:137.927 
[64]:   test's l2:137.835 
[65]:   test's l2:137.773 
[66]:   test's l2:137.693 
[67]:   test's l2:137.666 
[68]:   test's l2:137.647 
[69]:   test's l2:137.648 
[70]:   test's l2:137.608 
[71]:   test's l2:137.578 
[72]:   test's l2:137.539 
[73]:   test's l2:137.449 
[74]:   test's l2:137.401 
[75]:   test's l2:137.432 
[76]:   test's l2:137.43 
[77]:   test's l2:137.449 
[78]:   test's l2:137.411 
[79]:   test's l2:137.317 
[80]:   test's l2:137.324 
[81]:   test's l2:137.347 
[82]:   test's l2:137.344 
[83]:   test's l2:137.31 
[84]:   test's l2:137.235 
[85]:   test's l2:137.168 
[86]:   test's l2:137.158 
[87]:   test's l2:137.181 
[88]:   test's l2:137.097 
[89]:   test's l2:137.12 
[90]:   test's l2:137.114 
[91]:   test's l2:137.109 
[92]:   test's l2:137.054 
[93]:   test's l2:137.016 
[94]:   test's l2:137.027 
[95]:   test's l2:136.97 
[96]:   test's l2:136.977 
[97]:   test's l2:136.907 
[98]:   test's l2:136.921 
[99]:   test's l2:136.931 
[100]:  test's l2:136.906 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.050324 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.3 
[2]:    test's l2:147.083 
[3]:    test's l2:145.97 
[4]:    test's l2:145.398 
[5]:    test's l2:144.801 
[6]:    test's l2:144.094 
[7]:    test's l2:143.654 
[8]:    test's l2:143.332 
[9]:    test's l2:143.305 
[10]:   test's l2:143.029 
[11]:   test's l2:142.686 
[12]:   test's l2:142.381 
[13]:   test's l2:142.111 
[14]:   test's l2:141.947 
[15]:   test's l2:141.777 
[16]:   test's l2:141.73 
[17]:   test's l2:141.42 
[18]:   test's l2:141.316 
[19]:   test's l2:141.199 
[20]:   test's l2:140.959 
[21]:   test's l2:140.861 
[22]:   test's l2:140.733 
[23]:   test's l2:140.554 
[24]:   test's l2:140.404 
[25]:   test's l2:140.287 
[26]:   test's l2:140.172 
[27]:   test's l2:140.119 
[28]:   test's l2:139.974 
[29]:   test's l2:139.894 
[30]:   test's l2:139.702 
[31]:   test's l2:139.547 
[32]:   test's l2:139.478 
[33]:   test's l2:139.483 
[34]:   test's l2:139.375 
[35]:   test's l2:139.346 
[36]:   test's l2:139.227 
[37]:   test's l2:139.151 
[38]:   test's l2:139.167 
[39]:   test's l2:139.137 
[40]:   test's l2:139.055 
[41]:   test's l2:138.923 
[42]:   test's l2:138.814 
[43]:   test's l2:138.831 
[44]:   test's l2:138.836 
[45]:   test's l2:138.793 
[46]:   test's l2:138.742 
[47]:   test's l2:138.705 
[48]:   test's l2:138.574 
[49]:   test's l2:138.533 
[50]:   test's l2:138.507 
[51]:   test's l2:138.483 
[52]:   test's l2:138.377 
[53]:   test's l2:138.355 
[54]:   test's l2:138.346 
[55]:   test's l2:138.316 
[56]:   test's l2:138.315 
[57]:   test's l2:138.263 
[58]:   test's l2:138.254 
[59]:   test's l2:138.169 
[60]:   test's l2:138.105 
[61]:   test's l2:138.007 
[62]:   test's l2:137.996 
[63]:   test's l2:137.927 
[64]:   test's l2:137.835 
[65]:   test's l2:137.773 
[66]:   test's l2:137.693 
[67]:   test's l2:137.666 
[68]:   test's l2:137.647 
[69]:   test's l2:137.648 
[70]:   test's l2:137.608 
[71]:   test's l2:137.578 
[72]:   test's l2:137.539 
[73]:   test's l2:137.449 
[74]:   test's l2:137.401 
[75]:   test's l2:137.432 
[76]:   test's l2:137.43 
[77]:   test's l2:137.449 
[78]:   test's l2:137.411 
[79]:   test's l2:137.317 
[80]:   test's l2:137.324 
[81]:   test's l2:137.347 
[82]:   test's l2:137.344 
[83]:   test's l2:137.31 
[84]:   test's l2:137.235 
[85]:   test's l2:137.168 
[86]:   test's l2:137.158 
[87]:   test's l2:137.181 
[88]:   test's l2:137.097 
[89]:   test's l2:137.12 
[90]:   test's l2:137.114 
[91]:   test's l2:137.109 
[92]:   test's l2:137.054 
[93]:   test's l2:137.016 
[94]:   test's l2:137.027 
[95]:   test's l2:136.97 
[96]:   test's l2:136.977 
[97]:   test's l2:136.907 
[98]:   test's l2:136.921 
[99]:   test's l2:136.931 
[100]:  test's l2:136.906 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.038403 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.367 
[2]:    test's l2:147.156 
[3]:    test's l2:146.037 
[4]:    test's l2:145.455 
[5]:    test's l2:144.854 
[6]:    test's l2:144.154 
[7]:    test's l2:143.719 
[8]:    test's l2:143.403 
[9]:    test's l2:143.373 
[10]:   test's l2:143.096 
[11]:   test's l2:142.765 
[12]:   test's l2:142.46 
[13]:   test's l2:142.19 
[14]:   test's l2:142.026 
[15]:   test's l2:141.857 
[16]:   test's l2:141.808 
[17]:   test's l2:141.702 
[18]:   test's l2:141.395 
[19]:   test's l2:141.278 
[20]:   test's l2:141.172 
[21]:   test's l2:141.047 
[22]:   test's l2:140.813 
[23]:   test's l2:140.637 
[24]:   test's l2:140.486 
[25]:   test's l2:140.372 
[26]:   test's l2:140.256 
[27]:   test's l2:140.201 
[28]:   test's l2:140.055 
[29]:   test's l2:139.974 
[30]:   test's l2:139.783 
[31]:   test's l2:139.787 
[32]:   test's l2:139.634 
[33]:   test's l2:139.564 
[34]:   test's l2:139.458 
[35]:   test's l2:139.43 
[36]:   test's l2:139.309 
[37]:   test's l2:139.232 
[38]:   test's l2:139.247 
[39]:   test's l2:139.233 
[40]:   test's l2:139.196 
[41]:   test's l2:139.085 
[42]:   test's l2:138.954 
[43]:   test's l2:138.97 
[44]:   test's l2:138.931 
[45]:   test's l2:138.887 
[46]:   test's l2:138.836 
[47]:   test's l2:138.811 
[48]:   test's l2:138.781 
[49]:   test's l2:138.647 
[50]:   test's l2:138.606 
[51]:   test's l2:138.613 
[52]:   test's l2:138.506 
[53]:   test's l2:138.484 
[54]:   test's l2:138.522 
[55]:   test's l2:138.512 
[56]:   test's l2:138.51 
[57]:   test's l2:138.457 
[58]:   test's l2:138.448 
[59]:   test's l2:138.364 
[60]:   test's l2:138.301 
[61]:   test's l2:138.266 
[62]:   test's l2:138.166 
[63]:   test's l2:138.161 
[64]:   test's l2:138.144 
[65]:   test's l2:138.058 
[66]:   test's l2:137.99 
[67]:   test's l2:137.92 
[68]:   test's l2:137.889 
[69]:   test's l2:137.851 
[70]:   test's l2:137.76 
[71]:   test's l2:137.711 
[72]:   test's l2:137.677 
[73]:   test's l2:137.698 
[74]:   test's l2:137.631 
[75]:   test's l2:137.655 
[76]:   test's l2:137.555 
[77]:   test's l2:137.508 
[78]:   test's l2:137.539 
[79]:   test's l2:137.467 
[80]:   test's l2:137.457 
[81]:   test's l2:137.456 
[82]:   test's l2:137.449 
[83]:   test's l2:137.411 
[84]:   test's l2:137.392 
[85]:   test's l2:137.321 
[86]:   test's l2:137.331 
[87]:   test's l2:137.355 
[88]:   test's l2:137.365 
[89]:   test's l2:137.301 
[90]:   test's l2:137.338 
[91]:   test's l2:137.291 
[92]:   test's l2:137.286 
[93]:   test's l2:137.195 
[94]:   test's l2:137.138 
[95]:   test's l2:137.134 
[96]:   test's l2:137.099 
[97]:   test's l2:137.055 
[98]:   test's l2:136.998 
[99]:   test's l2:136.982 
[100]:  test's l2:136.99 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.059785 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.367 
[2]:    test's l2:147.156 
[3]:    test's l2:146.037 
[4]:    test's l2:145.455 
[5]:    test's l2:144.854 
[6]:    test's l2:144.154 
[7]:    test's l2:143.719 
[8]:    test's l2:143.403 
[9]:    test's l2:143.373 
[10]:   test's l2:143.096 
[11]:   test's l2:142.765 
[12]:   test's l2:142.46 
[13]:   test's l2:142.19 
[14]:   test's l2:142.026 
[15]:   test's l2:141.857 
[16]:   test's l2:141.808 
[17]:   test's l2:141.702 
[18]:   test's l2:141.395 
[19]:   test's l2:141.278 
[20]:   test's l2:141.172 
[21]:   test's l2:141.047 
[22]:   test's l2:140.813 
[23]:   test's l2:140.637 
[24]:   test's l2:140.486 
[25]:   test's l2:140.372 
[26]:   test's l2:140.256 
[27]:   test's l2:140.201 
[28]:   test's l2:140.055 
[29]:   test's l2:139.974 
[30]:   test's l2:139.783 
[31]:   test's l2:139.787 
[32]:   test's l2:139.634 
[33]:   test's l2:139.564 
[34]:   test's l2:139.458 
[35]:   test's l2:139.43 
[36]:   test's l2:139.309 
[37]:   test's l2:139.232 
[38]:   test's l2:139.247 
[39]:   test's l2:139.233 
[40]:   test's l2:139.196 
[41]:   test's l2:139.085 
[42]:   test's l2:138.954 
[43]:   test's l2:138.97 
[44]:   test's l2:138.931 
[45]:   test's l2:138.887 
[46]:   test's l2:138.836 
[47]:   test's l2:138.811 
[48]:   test's l2:138.781 
[49]:   test's l2:138.647 
[50]:   test's l2:138.606 
[51]:   test's l2:138.613 
[52]:   test's l2:138.506 
[53]:   test's l2:138.484 
[54]:   test's l2:138.522 
[55]:   test's l2:138.512 
[56]:   test's l2:138.51 
[57]:   test's l2:138.457 
[58]:   test's l2:138.448 
[59]:   test's l2:138.364 
[60]:   test's l2:138.301 
[61]:   test's l2:138.266 
[62]:   test's l2:138.166 
[63]:   test's l2:138.161 
[64]:   test's l2:138.144 
[65]:   test's l2:138.058 
[66]:   test's l2:137.99 
[67]:   test's l2:137.92 
[68]:   test's l2:137.889 
[69]:   test's l2:137.851 
[70]:   test's l2:137.76 
[71]:   test's l2:137.711 
[72]:   test's l2:137.677 
[73]:   test's l2:137.698 
[74]:   test's l2:137.631 
[75]:   test's l2:137.655 
[76]:   test's l2:137.555 
[77]:   test's l2:137.508 
[78]:   test's l2:137.539 
[79]:   test's l2:137.467 
[80]:   test's l2:137.457 
[81]:   test's l2:137.456 
[82]:   test's l2:137.449 
[83]:   test's l2:137.411 
[84]:   test's l2:137.392 
[85]:   test's l2:137.321 
[86]:   test's l2:137.331 
[87]:   test's l2:137.355 
[88]:   test's l2:137.365 
[89]:   test's l2:137.301 
[90]:   test's l2:137.338 
[91]:   test's l2:137.291 
[92]:   test's l2:137.286 
[93]:   test's l2:137.195 
[94]:   test's l2:137.138 
[95]:   test's l2:137.134 
[96]:   test's l2:137.099 
[97]:   test's l2:137.055 
[98]:   test's l2:136.998 
[99]:   test's l2:136.982 
[100]:  test's l2:136.99 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.044343 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.367 
[2]:    test's l2:147.156 
[3]:    test's l2:146.037 
[4]:    test's l2:145.455 
[5]:    test's l2:144.854 
[6]:    test's l2:144.154 
[7]:    test's l2:143.719 
[8]:    test's l2:143.403 
[9]:    test's l2:143.373 
[10]:   test's l2:143.096 
[11]:   test's l2:142.765 
[12]:   test's l2:142.46 
[13]:   test's l2:142.19 
[14]:   test's l2:142.026 
[15]:   test's l2:141.856 
[16]:   test's l2:141.807 
[17]:   test's l2:141.702 
[18]:   test's l2:141.395 
[19]:   test's l2:141.278 
[20]:   test's l2:141.172 
[21]:   test's l2:141.047 
[22]:   test's l2:140.813 
[23]:   test's l2:140.637 
[24]:   test's l2:140.486 
[25]:   test's l2:140.373 
[26]:   test's l2:140.256 
[27]:   test's l2:140.201 
[28]:   test's l2:140.055 
[29]:   test's l2:139.975 
[30]:   test's l2:139.783 
[31]:   test's l2:139.787 
[32]:   test's l2:139.635 
[33]:   test's l2:139.565 
[34]:   test's l2:139.459 
[35]:   test's l2:139.43 
[36]:   test's l2:139.309 
[37]:   test's l2:139.232 
[38]:   test's l2:139.248 
[39]:   test's l2:139.233 
[40]:   test's l2:139.196 
[41]:   test's l2:139.086 
[42]:   test's l2:138.955 
[43]:   test's l2:138.97 
[44]:   test's l2:138.932 
[45]:   test's l2:138.888 
[46]:   test's l2:138.836 
[47]:   test's l2:138.811 
[48]:   test's l2:138.781 
[49]:   test's l2:138.648 
[50]:   test's l2:138.606 
[51]:   test's l2:138.614 
[52]:   test's l2:138.506 
[53]:   test's l2:138.484 
[54]:   test's l2:138.522 
[55]:   test's l2:138.512 
[56]:   test's l2:138.51 
[57]:   test's l2:138.457 
[58]:   test's l2:138.448 
[59]:   test's l2:138.364 
[60]:   test's l2:138.3 
[61]:   test's l2:138.266 
[62]:   test's l2:138.165 
[63]:   test's l2:138.161 
[64]:   test's l2:138.144 
[65]:   test's l2:138.058 
[66]:   test's l2:137.99 
[67]:   test's l2:137.92 
[68]:   test's l2:137.889 
[69]:   test's l2:137.851 
[70]:   test's l2:137.76 
[71]:   test's l2:137.711 
[72]:   test's l2:137.677 
[73]:   test's l2:137.698 
[74]:   test's l2:137.631 
[75]:   test's l2:137.655 
[76]:   test's l2:137.555 
[77]:   test's l2:137.508 
[78]:   test's l2:137.539 
[79]:   test's l2:137.467 
[80]:   test's l2:137.457 
[81]:   test's l2:137.456 
[82]:   test's l2:137.449 
[83]:   test's l2:137.411 
[84]:   test's l2:137.392 
[85]:   test's l2:137.321 
[86]:   test's l2:137.331 
[87]:   test's l2:137.355 
[88]:   test's l2:137.364 
[89]:   test's l2:137.301 
[90]:   test's l2:137.271 
[91]:   test's l2:137.308 
[92]:   test's l2:137.304 
[93]:   test's l2:137.255 
[94]:   test's l2:137.166 
[95]:   test's l2:137.162 
[96]:   test's l2:137.128 
[97]:   test's l2:137.072 
[98]:   test's l2:137.056 
[99]:   test's l2:137.013 
[100]:  test's l2:136.955 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.062881 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.367 
[2]:    test's l2:147.156 
[3]:    test's l2:146.037 
[4]:    test's l2:145.455 
[5]:    test's l2:144.854 
[6]:    test's l2:144.154 
[7]:    test's l2:143.719 
[8]:    test's l2:143.403 
[9]:    test's l2:143.373 
[10]:   test's l2:143.096 
[11]:   test's l2:142.765 
[12]:   test's l2:142.46 
[13]:   test's l2:142.19 
[14]:   test's l2:142.026 
[15]:   test's l2:141.856 
[16]:   test's l2:141.807 
[17]:   test's l2:141.702 
[18]:   test's l2:141.395 
[19]:   test's l2:141.278 
[20]:   test's l2:141.172 
[21]:   test's l2:141.047 
[22]:   test's l2:140.813 
[23]:   test's l2:140.637 
[24]:   test's l2:140.486 
[25]:   test's l2:140.373 
[26]:   test's l2:140.256 
[27]:   test's l2:140.201 
[28]:   test's l2:140.055 
[29]:   test's l2:139.975 
[30]:   test's l2:139.783 
[31]:   test's l2:139.787 
[32]:   test's l2:139.635 
[33]:   test's l2:139.565 
[34]:   test's l2:139.459 
[35]:   test's l2:139.43 
[36]:   test's l2:139.309 
[37]:   test's l2:139.232 
[38]:   test's l2:139.248 
[39]:   test's l2:139.233 
[40]:   test's l2:139.196 
[41]:   test's l2:139.086 
[42]:   test's l2:138.955 
[43]:   test's l2:138.97 
[44]:   test's l2:138.932 
[45]:   test's l2:138.888 
[46]:   test's l2:138.836 
[47]:   test's l2:138.811 
[48]:   test's l2:138.781 
[49]:   test's l2:138.648 
[50]:   test's l2:138.606 
[51]:   test's l2:138.614 
[52]:   test's l2:138.506 
[53]:   test's l2:138.484 
[54]:   test's l2:138.522 
[55]:   test's l2:138.512 
[56]:   test's l2:138.51 
[57]:   test's l2:138.457 
[58]:   test's l2:138.448 
[59]:   test's l2:138.364 
[60]:   test's l2:138.3 
[61]:   test's l2:138.266 
[62]:   test's l2:138.165 
[63]:   test's l2:138.161 
[64]:   test's l2:138.144 
[65]:   test's l2:138.058 
[66]:   test's l2:137.99 
[67]:   test's l2:137.92 
[68]:   test's l2:137.889 
[69]:   test's l2:137.851 
[70]:   test's l2:137.76 
[71]:   test's l2:137.711 
[72]:   test's l2:137.677 
[73]:   test's l2:137.698 
[74]:   test's l2:137.631 
[75]:   test's l2:137.655 
[76]:   test's l2:137.555 
[77]:   test's l2:137.508 
[78]:   test's l2:137.539 
[79]:   test's l2:137.467 
[80]:   test's l2:137.457 
[81]:   test's l2:137.456 
[82]:   test's l2:137.449 
[83]:   test's l2:137.411 
[84]:   test's l2:137.392 
[85]:   test's l2:137.321 
[86]:   test's l2:137.331 
[87]:   test's l2:137.355 
[88]:   test's l2:137.364 
[89]:   test's l2:137.301 
[90]:   test's l2:137.271 
[91]:   test's l2:137.308 
[92]:   test's l2:137.304 
[93]:   test's l2:137.255 
[94]:   test's l2:137.166 
[95]:   test's l2:137.162 
[96]:   test's l2:137.128 
[97]:   test's l2:137.072 
[98]:   test's l2:137.056 
[99]:   test's l2:137.013 
[100]:  test's l2:136.955 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.058157 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.366 
[2]:    test's l2:147.156 
[3]:    test's l2:146.037 
[4]:    test's l2:145.455 
[5]:    test's l2:144.854 
[6]:    test's l2:144.154 
[7]:    test's l2:143.719 
[8]:    test's l2:143.403 
[9]:    test's l2:143.373 
[10]:   test's l2:143.096 
[11]:   test's l2:142.765 
[12]:   test's l2:142.46 
[13]:   test's l2:142.19 
[14]:   test's l2:142.026 
[15]:   test's l2:141.856 
[16]:   test's l2:141.807 
[17]:   test's l2:141.702 
[18]:   test's l2:141.396 
[19]:   test's l2:141.278 
[20]:   test's l2:141.172 
[21]:   test's l2:141.048 
[22]:   test's l2:140.813 
[23]:   test's l2:140.637 
[24]:   test's l2:140.487 
[25]:   test's l2:140.373 
[26]:   test's l2:140.256 
[27]:   test's l2:140.202 
[28]:   test's l2:140.056 
[29]:   test's l2:139.975 
[30]:   test's l2:139.784 
[31]:   test's l2:139.788 
[32]:   test's l2:139.635 
[33]:   test's l2:139.565 
[34]:   test's l2:139.459 
[35]:   test's l2:139.43 
[36]:   test's l2:139.309 
[37]:   test's l2:139.232 
[38]:   test's l2:139.248 
[39]:   test's l2:139.234 
[40]:   test's l2:139.196 
[41]:   test's l2:139.086 
[42]:   test's l2:138.955 
[43]:   test's l2:138.971 
[44]:   test's l2:138.932 
[45]:   test's l2:138.888 
[46]:   test's l2:138.837 
[47]:   test's l2:138.811 
[48]:   test's l2:138.781 
[49]:   test's l2:138.648 
[50]:   test's l2:138.606 
[51]:   test's l2:138.614 
[52]:   test's l2:138.506 
[53]:   test's l2:138.484 
[54]:   test's l2:138.522 
[55]:   test's l2:138.511 
[56]:   test's l2:138.509 
[57]:   test's l2:138.457 
[58]:   test's l2:138.448 
[59]:   test's l2:138.364 
[60]:   test's l2:138.3 
[61]:   test's l2:138.266 
[62]:   test's l2:138.165 
[63]:   test's l2:138.161 
[64]:   test's l2:138.143 
[65]:   test's l2:138.076 
[66]:   test's l2:137.99 
[67]:   test's l2:137.919 
[68]:   test's l2:137.889 
[69]:   test's l2:137.85 
[70]:   test's l2:137.76 
[71]:   test's l2:137.711 
[72]:   test's l2:137.677 
[73]:   test's l2:137.698 
[74]:   test's l2:137.631 
[75]:   test's l2:137.655 
[76]:   test's l2:137.555 
[77]:   test's l2:137.508 
[78]:   test's l2:137.539 
[79]:   test's l2:137.467 
[80]:   test's l2:137.457 
[81]:   test's l2:137.45 
[82]:   test's l2:137.449 
[83]:   test's l2:137.411 
[84]:   test's l2:137.392 
[85]:   test's l2:137.321 
[86]:   test's l2:137.33 
[87]:   test's l2:137.355 
[88]:   test's l2:137.364 
[89]:   test's l2:137.301 
[90]:   test's l2:137.271 
[91]:   test's l2:137.307 
[92]:   test's l2:137.303 
[93]:   test's l2:137.255 
[94]:   test's l2:137.166 
[95]:   test's l2:137.162 
[96]:   test's l2:137.127 
[97]:   test's l2:137.071 
[98]:   test's l2:137.055 
[99]:   test's l2:137.012 
[100]:  test's l2:136.955 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.057903 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.366 
[2]:    test's l2:147.156 
[3]:    test's l2:146.037 
[4]:    test's l2:145.455 
[5]:    test's l2:144.854 
[6]:    test's l2:144.154 
[7]:    test's l2:143.719 
[8]:    test's l2:143.403 
[9]:    test's l2:143.373 
[10]:   test's l2:143.096 
[11]:   test's l2:142.765 
[12]:   test's l2:142.46 
[13]:   test's l2:142.19 
[14]:   test's l2:142.026 
[15]:   test's l2:141.856 
[16]:   test's l2:141.807 
[17]:   test's l2:141.702 
[18]:   test's l2:141.396 
[19]:   test's l2:141.278 
[20]:   test's l2:141.172 
[21]:   test's l2:141.048 
[22]:   test's l2:140.813 
[23]:   test's l2:140.637 
[24]:   test's l2:140.487 
[25]:   test's l2:140.373 
[26]:   test's l2:140.256 
[27]:   test's l2:140.202 
[28]:   test's l2:140.056 
[29]:   test's l2:139.975 
[30]:   test's l2:139.784 
[31]:   test's l2:139.788 
[32]:   test's l2:139.635 
[33]:   test's l2:139.565 
[34]:   test's l2:139.459 
[35]:   test's l2:139.43 
[36]:   test's l2:139.309 
[37]:   test's l2:139.232 
[38]:   test's l2:139.248 
[39]:   test's l2:139.234 
[40]:   test's l2:139.196 
[41]:   test's l2:139.086 
[42]:   test's l2:138.955 
[43]:   test's l2:138.971 
[44]:   test's l2:138.932 
[45]:   test's l2:138.888 
[46]:   test's l2:138.837 
[47]:   test's l2:138.811 
[48]:   test's l2:138.781 
[49]:   test's l2:138.648 
[50]:   test's l2:138.606 
[51]:   test's l2:138.614 
[52]:   test's l2:138.506 
[53]:   test's l2:138.484 
[54]:   test's l2:138.522 
[55]:   test's l2:138.511 
[56]:   test's l2:138.509 
[57]:   test's l2:138.457 
[58]:   test's l2:138.448 
[59]:   test's l2:138.364 
[60]:   test's l2:138.3 
[61]:   test's l2:138.266 
[62]:   test's l2:138.165 
[63]:   test's l2:138.161 
[64]:   test's l2:138.143 
[65]:   test's l2:138.076 
[66]:   test's l2:137.99 
[67]:   test's l2:137.919 
[68]:   test's l2:137.889 
[69]:   test's l2:137.85 
[70]:   test's l2:137.76 
[71]:   test's l2:137.711 
[72]:   test's l2:137.677 
[73]:   test's l2:137.698 
[74]:   test's l2:137.631 
[75]:   test's l2:137.655 
[76]:   test's l2:137.555 
[77]:   test's l2:137.508 
[78]:   test's l2:137.539 
[79]:   test's l2:137.467 
[80]:   test's l2:137.457 
[81]:   test's l2:137.45 
[82]:   test's l2:137.449 
[83]:   test's l2:137.411 
[84]:   test's l2:137.392 
[85]:   test's l2:137.321 
[86]:   test's l2:137.33 
[87]:   test's l2:137.355 
[88]:   test's l2:137.364 
[89]:   test's l2:137.301 
[90]:   test's l2:137.271 
[91]:   test's l2:137.307 
[92]:   test's l2:137.303 
[93]:   test's l2:137.255 
[94]:   test's l2:137.166 
[95]:   test's l2:137.162 
[96]:   test's l2:137.127 
[97]:   test's l2:137.071 
[98]:   test's l2:137.055 
[99]:   test's l2:137.012 
[100]:  test's l2:136.955 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.057516 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.366 
[2]:    test's l2:147.156 
[3]:    test's l2:146.037 
[4]:    test's l2:145.455 
[5]:    test's l2:144.854 
[6]:    test's l2:144.154 
[7]:    test's l2:143.719 
[8]:    test's l2:143.403 
[9]:    test's l2:143.373 
[10]:   test's l2:143.095 
[11]:   test's l2:142.765 
[12]:   test's l2:142.46 
[13]:   test's l2:142.19 
[14]:   test's l2:142.026 
[15]:   test's l2:141.856 
[16]:   test's l2:141.807 
[17]:   test's l2:141.702 
[18]:   test's l2:141.396 
[19]:   test's l2:141.278 
[20]:   test's l2:141.172 
[21]:   test's l2:141.048 
[22]:   test's l2:140.813 
[23]:   test's l2:140.637 
[24]:   test's l2:140.487 
[25]:   test's l2:140.373 
[26]:   test's l2:140.256 
[27]:   test's l2:140.202 
[28]:   test's l2:140.056 
[29]:   test's l2:139.975 
[30]:   test's l2:139.784 
[31]:   test's l2:139.788 
[32]:   test's l2:139.636 
[33]:   test's l2:139.565 
[34]:   test's l2:139.46 
[35]:   test's l2:139.431 
[36]:   test's l2:139.31 
[37]:   test's l2:139.233 
[38]:   test's l2:139.248 
[39]:   test's l2:139.234 
[40]:   test's l2:139.196 
[41]:   test's l2:139.086 
[42]:   test's l2:138.955 
[43]:   test's l2:138.971 
[44]:   test's l2:138.932 
[45]:   test's l2:138.888 
[46]:   test's l2:138.837 
[47]:   test's l2:138.811 
[48]:   test's l2:138.781 
[49]:   test's l2:138.648 
[50]:   test's l2:138.606 
[51]:   test's l2:138.614 
[52]:   test's l2:138.506 
[53]:   test's l2:138.484 
[54]:   test's l2:138.522 
[55]:   test's l2:138.511 
[56]:   test's l2:138.509 
[57]:   test's l2:138.457 
[58]:   test's l2:138.447 
[59]:   test's l2:138.363 
[60]:   test's l2:138.3 
[61]:   test's l2:138.265 
[62]:   test's l2:138.165 
[63]:   test's l2:138.161 
[64]:   test's l2:138.143 
[65]:   test's l2:138.076 
[66]:   test's l2:137.99 
[67]:   test's l2:137.919 
[68]:   test's l2:137.889 
[69]:   test's l2:137.85 
[70]:   test's l2:137.76 
[71]:   test's l2:137.711 
[72]:   test's l2:137.677 
[73]:   test's l2:137.698 
[74]:   test's l2:137.631 
[75]:   test's l2:137.655 
[76]:   test's l2:137.556 
[77]:   test's l2:137.508 
[78]:   test's l2:137.539 
[79]:   test's l2:137.467 
[80]:   test's l2:137.457 
[81]:   test's l2:137.45 
[82]:   test's l2:137.449 
[83]:   test's l2:137.411 
[84]:   test's l2:137.392 
[85]:   test's l2:137.321 
[86]:   test's l2:137.33 
[87]:   test's l2:137.354 
[88]:   test's l2:137.364 
[89]:   test's l2:137.3 
[90]:   test's l2:137.27 
[91]:   test's l2:137.266 
[92]:   test's l2:137.218 
[93]:   test's l2:137.13 
[94]:   test's l2:137.126 
[95]:   test's l2:137.091 
[96]:   test's l2:137.036 
[97]:   test's l2:137.072 
[98]:   test's l2:137.056 
[99]:   test's l2:137.014 
[100]:  test's l2:136.956 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.051094 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.366 
[2]:    test's l2:147.156 
[3]:    test's l2:146.037 
[4]:    test's l2:145.455 
[5]:    test's l2:144.854 
[6]:    test's l2:144.154 
[7]:    test's l2:143.719 
[8]:    test's l2:143.403 
[9]:    test's l2:143.373 
[10]:   test's l2:143.095 
[11]:   test's l2:142.765 
[12]:   test's l2:142.46 
[13]:   test's l2:142.19 
[14]:   test's l2:142.026 
[15]:   test's l2:141.856 
[16]:   test's l2:141.807 
[17]:   test's l2:141.702 
[18]:   test's l2:141.396 
[19]:   test's l2:141.278 
[20]:   test's l2:141.172 
[21]:   test's l2:141.048 
[22]:   test's l2:140.813 
[23]:   test's l2:140.637 
[24]:   test's l2:140.487 
[25]:   test's l2:140.373 
[26]:   test's l2:140.256 
[27]:   test's l2:140.202 
[28]:   test's l2:140.056 
[29]:   test's l2:139.975 
[30]:   test's l2:139.784 
[31]:   test's l2:139.788 
[32]:   test's l2:139.636 
[33]:   test's l2:139.565 
[34]:   test's l2:139.46 
[35]:   test's l2:139.431 
[36]:   test's l2:139.31 
[37]:   test's l2:139.233 
[38]:   test's l2:139.248 
[39]:   test's l2:139.234 
[40]:   test's l2:139.196 
[41]:   test's l2:139.086 
[42]:   test's l2:138.955 
[43]:   test's l2:138.971 
[44]:   test's l2:138.932 
[45]:   test's l2:138.888 
[46]:   test's l2:138.837 
[47]:   test's l2:138.811 
[48]:   test's l2:138.781 
[49]:   test's l2:138.648 
[50]:   test's l2:138.606 
[51]:   test's l2:138.614 
[52]:   test's l2:138.506 
[53]:   test's l2:138.484 
[54]:   test's l2:138.522 
[55]:   test's l2:138.511 
[56]:   test's l2:138.509 
[57]:   test's l2:138.457 
[58]:   test's l2:138.447 
[59]:   test's l2:138.363 
[60]:   test's l2:138.3 
[61]:   test's l2:138.265 
[62]:   test's l2:138.165 
[63]:   test's l2:138.161 
[64]:   test's l2:138.143 
[65]:   test's l2:138.076 
[66]:   test's l2:137.99 
[67]:   test's l2:137.919 
[68]:   test's l2:137.889 
[69]:   test's l2:137.85 
[70]:   test's l2:137.76 
[71]:   test's l2:137.711 
[72]:   test's l2:137.677 
[73]:   test's l2:137.698 
[74]:   test's l2:137.631 
[75]:   test's l2:137.655 
[76]:   test's l2:137.556 
[77]:   test's l2:137.508 
[78]:   test's l2:137.539 
[79]:   test's l2:137.467 
[80]:   test's l2:137.457 
[81]:   test's l2:137.45 
[82]:   test's l2:137.449 
[83]:   test's l2:137.411 
[84]:   test's l2:137.392 
[85]:   test's l2:137.321 
[86]:   test's l2:137.33 
[87]:   test's l2:137.354 
[88]:   test's l2:137.364 
[89]:   test's l2:137.3 
[90]:   test's l2:137.27 
[91]:   test's l2:137.266 
[92]:   test's l2:137.218 
[93]:   test's l2:137.13 
[94]:   test's l2:137.126 
[95]:   test's l2:137.091 
[96]:   test's l2:137.036 
[97]:   test's l2:137.072 
[98]:   test's l2:137.056 
[99]:   test's l2:137.014 
[100]:  test's l2:136.956 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.063687 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.366 
[2]:    test's l2:147.156 
[3]:    test's l2:146.037 
[4]:    test's l2:145.455 
[5]:    test's l2:144.854 
[6]:    test's l2:144.154 
[7]:    test's l2:143.719 
[8]:    test's l2:143.403 
[9]:    test's l2:143.373 
[10]:   test's l2:143.095 
[11]:   test's l2:142.764 
[12]:   test's l2:142.46 
[13]:   test's l2:142.19 
[14]:   test's l2:142.026 
[15]:   test's l2:141.856 
[16]:   test's l2:141.807 
[17]:   test's l2:141.702 
[18]:   test's l2:141.396 
[19]:   test's l2:141.278 
[20]:   test's l2:141.172 
[21]:   test's l2:141.048 
[22]:   test's l2:140.814 
[23]:   test's l2:140.637 
[24]:   test's l2:140.487 
[25]:   test's l2:140.373 
[26]:   test's l2:140.257 
[27]:   test's l2:140.202 
[28]:   test's l2:140.056 
[29]:   test's l2:139.975 
[30]:   test's l2:139.785 
[31]:   test's l2:139.789 
[32]:   test's l2:139.636 
[33]:   test's l2:139.566 
[34]:   test's l2:139.46 
[35]:   test's l2:139.431 
[36]:   test's l2:139.31 
[37]:   test's l2:139.233 
[38]:   test's l2:139.249 
[39]:   test's l2:139.234 
[40]:   test's l2:139.197 
[41]:   test's l2:139.086 
[42]:   test's l2:138.956 
[43]:   test's l2:138.971 
[44]:   test's l2:138.932 
[45]:   test's l2:138.888 
[46]:   test's l2:138.837 
[47]:   test's l2:138.812 
[48]:   test's l2:138.782 
[49]:   test's l2:138.648 
[50]:   test's l2:138.607 
[51]:   test's l2:138.614 
[52]:   test's l2:138.506 
[53]:   test's l2:138.484 
[54]:   test's l2:138.522 
[55]:   test's l2:138.511 
[56]:   test's l2:138.509 
[57]:   test's l2:138.456 
[58]:   test's l2:138.447 
[59]:   test's l2:138.363 
[60]:   test's l2:138.3 
[61]:   test's l2:138.265 
[62]:   test's l2:138.165 
[63]:   test's l2:138.16 
[64]:   test's l2:138.143 
[65]:   test's l2:138.076 
[66]:   test's l2:137.99 
[67]:   test's l2:137.919 
[68]:   test's l2:137.888 
[69]:   test's l2:137.85 
[70]:   test's l2:137.76 
[71]:   test's l2:137.711 
[72]:   test's l2:137.677 
[73]:   test's l2:137.698 
[74]:   test's l2:137.631 
[75]:   test's l2:137.655 
[76]:   test's l2:137.556 
[77]:   test's l2:137.508 
[78]:   test's l2:137.539 
[79]:   test's l2:137.467 
[80]:   test's l2:137.457 
[81]:   test's l2:137.45 
[82]:   test's l2:137.449 
[83]:   test's l2:137.43 
[84]:   test's l2:137.392 
[85]:   test's l2:137.321 
[86]:   test's l2:137.33 
[87]:   test's l2:137.354 
[88]:   test's l2:137.364 
[89]:   test's l2:137.3 
[90]:   test's l2:137.27 
[91]:   test's l2:137.266 
[92]:   test's l2:137.218 
[93]:   test's l2:137.13 
[94]:   test's l2:137.126 
[95]:   test's l2:137.091 
[96]:   test's l2:137.036 
[97]:   test's l2:137.072 
[98]:   test's l2:137.056 
[99]:   test's l2:137.013 
[100]:  test's l2:136.955 
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Unknown parameter: Depth
[LightGBM] [Warning] Auto-choosing row-wise multi-threading, the overhead of testing was 0.061104 seconds.
You can set `force_row_wise=true` to remove the overhead.
And if memory is not enough, you can set `force_col_wise=true`.
[LightGBM] [Info] Total Bins 38450
[LightGBM] [Info] Number of data points in the train set: 47099, number of used features: 451
[LightGBM] [Info] Start training from score 29.119387
[1]:    test's l2:148.366 
[2]:    test's l2:147.156 
[3]:    test's l2:146.037 
[4]:    test's l2:145.455 
[5]:    test's l2:144.854 
[6]:    test's l2:144.154 
[7]:    test's l2:143.719 
[8]:    test's l2:143.403 
[9]:    test's l2:143.373 
[10]:   test's l2:143.095 
[11]:   test's l2:142.764 
[12]:   test's l2:142.46 
[13]:   test's l2:142.19 
[14]:   test's l2:142.026 
[15]:   test's l2:141.856 
[16]:   test's l2:141.807 
[17]:   test's l2:141.702 
[18]:   test's l2:141.396 
[19]:   test's l2:141.278 
[20]:   test's l2:141.172 
[21]:   test's l2:141.048 
[22]:   test's l2:140.814 
[23]:   test's l2:140.637 
[24]:   test's l2:140.487 
[25]:   test's l2:140.373 
[26]:   test's l2:140.257 
[27]:   test's l2:140.202 
[28]:   test's l2:140.056 
[29]:   test's l2:139.975 
[30]:   test's l2:139.785 
[31]:   test's l2:139.789 
[32]:   test's l2:139.636 
[33]:   test's l2:139.566 
[34]:   test's l2:139.46 
[35]:   test's l2:139.431 
[36]:   test's l2:139.31 
[37]:   test's l2:139.233 
[38]:   test's l2:139.249 
[39]:   test's l2:139.234 
[40]:   test's l2:139.197 
[41]:   test's l2:139.086 
[42]:   test's l2:138.956 
[43]:   test's l2:138.971 
[44]:   test's l2:138.932 
[45]:   test's l2:138.888 
[46]:   test's l2:138.837 
[47]:   test's l2:138.812 
[48]:   test's l2:138.782 
[49]:   test's l2:138.648 
[50]:   test's l2:138.607 
[51]:   test's l2:138.614 
[52]:   test's l2:138.506 
[53]:   test's l2:138.484 
[54]:   test's l2:138.522 
[55]:   test's l2:138.511 
[56]:   test's l2:138.509 
[57]:   test's l2:138.456 
[58]:   test's l2:138.447 
[59]:   test's l2:138.363 
[60]:   test's l2:138.3 
[61]:   test's l2:138.265 
[62]:   test's l2:138.165 
[63]:   test's l2:138.16 
[64]:   test's l2:138.143 
[65]:   test's l2:138.076 
[66]:   test's l2:137.99 
[67]:   test's l2:137.919 
[68]:   test's l2:137.888 
[69]:   test's l2:137.85 
[70]:   test's l2:137.76 
[71]:   test's l2:137.711 
[72]:   test's l2:137.677 
[73]:   test's l2:137.698 
[74]:   test's l2:137.631 
[75]:   test's l2:137.655 
[76]:   test's l2:137.556 
[77]:   test's l2:137.508 
[78]:   test's l2:137.539 
[79]:   test's l2:137.467 
[80]:   test's l2:137.457 
[81]:   test's l2:137.45 
[82]:   test's l2:137.449 
[83]:   test's l2:137.43 
[84]:   test's l2:137.392 
[85]:   test's l2:137.321 
[86]:   test's l2:137.33 
[87]:   test's l2:137.354 
[88]:   test's l2:137.364 
[89]:   test's l2:137.3 
[90]:   test's l2:137.27 
[91]:   test's l2:137.266 
[92]:   test's l2:137.218 
[93]:   test's l2:137.13 
[94]:   test's l2:137.126 
[95]:   test's l2:137.091 
[96]:   test's l2:137.036 
[97]:   test's l2:137.072 
[98]:   test's l2:137.056 
[99]:   test's l2:137.013 
[100]:  test's l2:136.955 
cat("Model ", which.min(perf), " is lowest loss: ", min(perf), sep = "")
Model 13 is lowest loss: 136.6688
print(grid_search[which.min(perf), ])

Algorithms score is around 0.3 and computational time is:

lgbtaketime <- lstoptime - lstarttime
cat("The algorithms takes ", lgbtaketime, "seconds")
The algorithms takes  2.702765 seconds

catboost


catstartTime <- Sys.time()

fit_params <- list(l2_leaf_reg = 0.001,
                   depth=6,
                   learning_rate = 0.1,
                   iterations = 100,
                   random_seed = 233)


pool = catboost.load_pool(as.matrix(train_set.num_X), label = as.integer(train_set[,1]))

model <- catboost.train(pool, params = fit_params)
0:  learn: 12.1833360   total: 115ms    remaining: 11.3s
1:  learn: 12.1447534   total: 146ms    remaining: 7.16s
2:  learn: 12.1113175   total: 177ms    remaining: 5.73s
3:  learn: 12.0837512   total: 206ms    remaining: 4.93s
4:  learn: 12.0644307   total: 242ms    remaining: 4.59s
5:  learn: 12.0424650   total: 277ms    remaining: 4.34s
6:  learn: 12.0226752   total: 306ms    remaining: 4.07s
7:  learn: 12.0032701   total: 341ms    remaining: 3.93s
8:  learn: 11.9891605   total: 375ms    remaining: 3.8s
9:  learn: 11.9729407   total: 408ms    remaining: 3.67s
10: learn: 11.9583620   total: 440ms    remaining: 3.56s
11: learn: 11.9442188   total: 475ms    remaining: 3.49s
12: learn: 11.9297522   total: 509ms    remaining: 3.41s
13: learn: 11.9168966   total: 544ms    remaining: 3.34s
14: learn: 11.9025204   total: 583ms    remaining: 3.3s
15: learn: 11.8938549   total: 619ms    remaining: 3.25s
16: learn: 11.8863620   total: 656ms    remaining: 3.2s
17: learn: 11.8780062   total: 688ms    remaining: 3.13s
18: learn: 11.8716035   total: 722ms    remaining: 3.08s
19: learn: 11.8620134   total: 755ms    remaining: 3.02s
20: learn: 11.8514721   total: 789ms    remaining: 2.97s
21: learn: 11.8407465   total: 820ms    remaining: 2.91s
22: learn: 11.8311564   total: 855ms    remaining: 2.86s
23: learn: 11.8239107   total: 895ms    remaining: 2.83s
24: learn: 11.8180557   total: 935ms    remaining: 2.8s
25: learn: 11.8126754   total: 973ms    remaining: 2.77s
26: learn: 11.8061272   total: 1.01s    remaining: 2.74s
27: learn: 11.8002607   total: 1.05s    remaining: 2.7s
28: learn: 11.7956174   total: 1.09s    remaining: 2.68s
29: learn: 11.7898608   total: 1.13s    remaining: 2.63s
30: learn: 11.7850533   total: 1.16s    remaining: 2.58s
31: learn: 11.7760486   total: 1.2s remaining: 2.55s
32: learn: 11.7695443   total: 1.24s    remaining: 2.51s
33: learn: 11.7642735   total: 1.27s    remaining: 2.47s
34: learn: 11.7577410   total: 1.31s    remaining: 2.43s
35: learn: 11.7523458   total: 1.35s    remaining: 2.39s
36: learn: 11.7471293   total: 1.38s    remaining: 2.35s
37: learn: 11.7414293   total: 1.42s    remaining: 2.31s
38: learn: 11.7373151   total: 1.45s    remaining: 2.27s
39: learn: 11.7332491   total: 1.49s    remaining: 2.24s
40: learn: 11.7294174   total: 1.52s    remaining: 2.19s
41: learn: 11.7246231   total: 1.56s    remaining: 2.15s
42: learn: 11.7201055   total: 1.59s    remaining: 2.11s
43: learn: 11.7163502   total: 1.63s    remaining: 2.07s
44: learn: 11.7116538   total: 1.67s    remaining: 2.04s
45: learn: 11.7077535   total: 1.71s    remaining: 2s
46: learn: 11.7048007   total: 1.74s    remaining: 1.97s
47: learn: 11.7008189   total: 1.78s    remaining: 1.93s
48: learn: 11.6967152   total: 1.81s    remaining: 1.89s
49: learn: 11.6931495   total: 1.85s    remaining: 1.85s
50: learn: 11.6905724   total: 1.89s    remaining: 1.82s
51: learn: 11.6882417   total: 1.94s    remaining: 1.79s
52: learn: 11.6848625   total: 1.99s    remaining: 1.76s
53: learn: 11.6822779   total: 2.03s    remaining: 1.73s
54: learn: 11.6793212   total: 2.07s    remaining: 1.7s
55: learn: 11.6767365   total: 2.11s    remaining: 1.66s
56: learn: 11.6712698   total: 2.16s    remaining: 1.63s
57: learn: 11.6675051   total: 2.19s    remaining: 1.59s
58: learn: 11.6638487   total: 2.23s    remaining: 1.55s
59: learn: 11.6615166   total: 2.27s    remaining: 1.51s
60: learn: 11.6583897   total: 2.31s    remaining: 1.48s
61: learn: 11.6558059   total: 2.35s    remaining: 1.44s
62: learn: 11.6517810   total: 2.38s    remaining: 1.4s
63: learn: 11.6477295   total: 2.43s    remaining: 1.36s
64: learn: 11.6455330   total: 2.46s    remaining: 1.32s
65: learn: 11.6399977   total: 2.5s remaining: 1.29s
66: learn: 11.6368297   total: 2.55s    remaining: 1.25s
67: learn: 11.6342583   total: 2.59s    remaining: 1.22s
68: learn: 11.6309181   total: 2.63s    remaining: 1.18s
69: learn: 11.6285195   total: 2.67s    remaining: 1.14s
70: learn: 11.6251345   total: 2.71s    remaining: 1.11s
71: learn: 11.6225270   total: 2.76s    remaining: 1.07s
72: learn: 11.6188189   total: 2.8s remaining: 1.04s
73: learn: 11.6152862   total: 2.84s    remaining: 999ms
74: learn: 11.6108826   total: 2.88s    remaining: 960ms
75: learn: 11.6084601   total: 2.92s    remaining: 921ms
76: learn: 11.6051310   total: 2.96s    remaining: 883ms
77: learn: 11.6029455   total: 2.99s    remaining: 843ms
78: learn: 11.6008639   total: 3.02s    remaining: 804ms
79: learn: 11.5968136   total: 3.07s    remaining: 767ms
80: learn: 11.5939051   total: 3.11s    remaining: 729ms
81: learn: 11.5910807   total: 3.15s    remaining: 691ms
82: learn: 11.5887241   total: 3.19s    remaining: 653ms
83: learn: 11.5861250   total: 3.23s    remaining: 615ms
84: learn: 11.5824653   total: 3.27s    remaining: 577ms
85: learn: 11.5799506   total: 3.31s    remaining: 538ms
86: learn: 11.5777348   total: 3.35s    remaining: 501ms
87: learn: 11.5755174   total: 3.39s    remaining: 463ms
88: learn: 11.5733470   total: 3.43s    remaining: 424ms
89: learn: 11.5702128   total: 3.47s    remaining: 385ms
90: learn: 11.5686749   total: 3.5s remaining: 346ms
91: learn: 11.5660531   total: 3.54s    remaining: 308ms
92: learn: 11.5641327   total: 3.58s    remaining: 269ms
93: learn: 11.5614847   total: 3.61s    remaining: 230ms
94: learn: 11.5581782   total: 3.64s    remaining: 192ms
95: learn: 11.5566305   total: 3.68s    remaining: 153ms
96: learn: 11.5547748   total: 3.72s    remaining: 115ms
97: learn: 11.5497704   total: 3.75s    remaining: 76.5ms
98: learn: 11.5480024   total: 3.79s    remaining: 38.2ms
99: learn: 11.5460042   total: 3.82s    remaining: 0us
catstopTime <- Sys.time()

cattakenTime <- catstopTime - catstartTime

calculate the prediction:

#get the prediction
catprediction <- catboost.predict(model, 
                                  pool, 
                                  prediction_type = 'RawFormulaVal')

calculate the program score:

#round the prediction
catprediction <- round(catprediction,0)

catTable <- table(train_set$Str_h_texture,catprediction)

catTablerow <- rownames(catTable)
catTablecol <- colnames(catTable)
catscore <- sumElementinTable(catTable,catTablerow,catTablecol)/sum(catTable)
cat('The algorithm takes' ,cattakenTime , 'seconds')
The algorithm takes 4.908873 seconds
cat('The algorithm scores' ,catscore)
The algorithm scores 0.02403448

naivebayes classification*


nbstarttime <- Sys.time()
  
nbClassifier <- naiveBayes(as.factor(Str_h_texture) ~ .,data = train_set,laplace=2)
nbTestPrediction <- predict(nbClassifier,test_set,type = "class")
nbTableTest <- table(nbTestPrediction,test_set$Str_h_texture)

nbTestTablerow <- rownames(nbTableTest)
nbTestTablecol <- colnames(nbTableTest)
nbTestTablescore<- sumElementinTable(nbTableTest,nbTestTablerow,nbTestTablecol)/sum(nbTableTest)

nbendtime <- Sys.time()

nbTrainPrediction <- predict(nbClassifier,train_set,type = "class")

nbalgorithm

cat('NaiveBayes takes',nbtakentime,'seconds')
NaiveBayes takes 4.305502 seconds

nbscore

cat('NaiveBayes score',nbTrainTablescore)
NaiveBayes score 355

fastNaiveBayes algorithms by gaussian

fnbstartTime <- Sys.time()
dist <- fnb.detect_distribution(train_set.num_X)
gauss <- fnb.gaussian(train_set.num_X[,dist$gaussian], as.factor(train_set$Str_h_texture),sparse = TRUE,check = FALSE)
pred <- predict(gauss, train_set.num_X[,dist$gaussian])
fnbendTime <- Sys.time()
error <- mean(as.factor(train_set$Str_h_texture)!=pred)
print(error)
[1] 0.9383002
fnbtakentime <- fnbendTime - fnbstartTime

cat("fastNaiveBayes takes ", round(fnbtakentime,6), "seconds")
fastNaiveBayes takes  27.94227 seconds

MLP algorithm (a subsitute algorithm for nn classifier)

Data preprocessing

train_set.norm <- train_set
maxStr_h_texture <- max(train_set.norm$Str_h_texture)
minStr_h_texture <- min(train_set.norm$Str_h_texture)
train_set.norm$Str_h_texture <- normalize(train_set.norm$Str_h_texture)
train_set.norm.X <- train_set.norm[,-1]

test_set.norm <- test_set
maxteStr_h_texture <- max(test_set.norm$Str_h_texture)
minteStr_h_texture <- min(test_set.norm$Str_h_texture)
test_set.norm$Str_h_texture <- normalize(test_set.norm$Str_h_texture)
test_set.norm.X <- test_set.norm[,-1]
mlpstarttime <- Sys.time()

model <- mlp(train_set.norm.X, train_set.norm$Str_h_texture, size=5, learnFuncParams=c(0.1), 
             maxit=50, inputsTest=test_set.norm.X, targetsTest=test_set.norm$Str_h_texture)

summary(model)
SNNS network definition file V1.4-3D
generated at Thu May 14 15:32:26 2020

network name : RSNNS_untitled
source files :
no. of units : 606
no. of connections : 3005
no. of unit types : 0
no. of site types : 0


learning function : Std_Backpropagation
update function   : Topological_Order


unit default section :

act      | bias     | st | subnet | layer | act func     | out func
---------|----------|----|--------|-------|--------------|-------------
 0.00000 |  0.00000 | i  |      0 |     1 | Act_Logistic | Out_Identity 
---------|----------|----|--------|-------|--------------|-------------


unit definition section :

no. | typeName | unitName               | act      | bias     | st | position    | act func     | out func | sites
----|----------|------------------------|----------|----------|----|-------------|--------------|----------|-------
  1 |          | Input_Str_samp_no      |  0.00000 | -0.27258 | i  |   1,  0,  0 | Act_Identity |          | 
  2 |          | Input_Str_labr_no      |  0.00000 |  0.06946 | i  |   2,  0,  0 | Act_Identity |          | 
  3 |          | Input_Str_X1.40E.02    |  0.00000 |  0.23725 | i  |   3,  0,  0 | Act_Identity |          | 
  4 |          | Input_Str_X1.40E.04    |  0.00000 |  0.06173 | i  |   4,  0,  0 | Act_Identity |          | 
  5 |          | Input_Str_X1.80E.03    |  0.00000 | -0.10419 | i  |   5,  0,  0 | Act_Identity |          | 
  6 |          | Input_Str_X10_BC       |  0.00000 | -0.18690 | i  |   6,  0,  0 | Act_Identity |          | 
  7 |          | Input_Str_X10A_NR      |  0.00000 | -0.24656 | i  |   7,  0,  0 | Act_Identity |          | 
  8 |          | Input_Str_X10A1        |  0.00000 | -0.27116 | i  |   8,  0,  0 | Act_Identity |          | 
  9 |          | Input_Str_X10B         |  0.00000 |  0.24816 | i  |   9,  0,  0 | Act_Identity |          | 
 10 |          | Input_Str_X10B_NR      |  0.00000 |  0.06979 | i  |  10,  0,  0 | Act_Identity |          | 
 11 |          | Input_Str_X10B1        |  0.00000 | -0.03419 | i  |  11,  0,  0 | Act_Identity |          | 
 12 |          | Input_Str_X10B3        |  0.00000 |  0.28084 | i  |  12,  0,  0 | Act_Identity |          | 
 13 |          | Input_Str_X10D1        |  0.00000 |  0.28388 | i  |  13,  0,  0 | Act_Identity |          | 
 14 |          | Input_Str_X11A1        |  0.00000 |  0.11003 | i  |  14,  0,  0 | Act_Identity |          | 
 15 |          | Input_Str_X12_HCL_CU   |  0.00000 | -0.09536 | i  |  15,  0,  0 | Act_Identity |          | 
 16 |          | Input_Str_X12_HCL_FE   |  0.00000 |  0.05696 | i  |  16,  0,  0 | Act_Identity |          | 
 17 |          | Input_Str_X12_HCL_MN   |  0.00000 |  0.29094 | i  |  17,  0,  0 | Act_Identity |          | 
 18 |          | Input_Str_X12_HCL_ZN   |  0.00000 | -0.03434 | i  |  18,  0,  0 | Act_Identity |          | 
 19 |          | Input_Str_X12_HF_CU    |  0.00000 |  0.28922 | i  |  19,  0,  0 | Act_Identity |          | 
 20 |          | Input_Str_X12_HF_FE    |  0.00000 | -0.00433 | i  |  20,  0,  0 | Act_Identity |          | 
 21 |          | Input_Str_X12_HF_MN    |  0.00000 | -0.07207 | i  |  21,  0,  0 | Act_Identity |          | 
 22 |          | Input_Str_X12_HF_ZN    |  0.00000 | -0.28903 | i  |  22,  0,  0 | Act_Identity |          | 
 23 |          | Input_Str_X12_NR_CU    |  0.00000 |  0.17272 | i  |  23,  0,  0 | Act_Identity |          | 
 24 |          | Input_Str_X12_NR_FE    |  0.00000 | -0.08365 | i  |  24,  0,  0 | Act_Identity |          | 
 25 |          | Input_Str_X12_NR_MN    |  0.00000 |  0.28085 | i  |  25,  0,  0 | Act_Identity |          | 
 26 |          | Input_Str_X12_NR_ZN    |  0.00000 |  0.24954 | i  |  26,  0,  0 | Act_Identity |          | 
 27 |          | Input_Str_X12_XRF_CU   |  0.00000 |  0.19127 | i  |  27,  0,  0 | Act_Identity |          | 
 28 |          | Input_Str_X12_XRF_FE   |  0.00000 |  0.24166 | i  |  28,  0,  0 | Act_Identity |          | 
 29 |          | Input_Str_X12_XRF_MN   |  0.00000 |  0.21554 | i  |  29,  0,  0 | Act_Identity |          | 
 30 |          | Input_Str_X12_XRF_ZN   |  0.00000 |  0.15703 | i  |  30,  0,  0 | Act_Identity |          | 
 31 |          | Input_Str_X12A1_CD     |  0.00000 | -0.01010 | i  |  31,  0,  0 | Act_Identity |          | 
 32 |          | Input_Str_X12A1_CO     |  0.00000 | -0.16547 | i  |  32,  0,  0 | Act_Identity |          | 
 33 |          | Input_Str_X12A1_Cu     |  0.00000 |  0.05173 | i  |  33,  0,  0 | Act_Identity |          | 
 34 |          | Input_Str_X12A1_CU     |  0.00000 | -0.21992 | i  |  34,  0,  0 | Act_Identity |          | 
 35 |          | Input_Str_X12A1_Fe     |  0.00000 | -0.07050 | i  |  35,  0,  0 | Act_Identity |          | 
 36 |          | Input_Str_X12A1_FE     |  0.00000 | -0.25373 | i  |  36,  0,  0 | Act_Identity |          | 
 37 |          | Input_Str_X12A1_Mn     |  0.00000 |  0.10477 | i  |  37,  0,  0 | Act_Identity |          | 
 38 |          | Input_Str_X12A1_MN     |  0.00000 | -0.22350 | i  |  38,  0,  0 | Act_Identity |          | 
 39 |          | Input_Str_X12A1_PB     |  0.00000 |  0.23410 | i  |  39,  0,  0 | Act_Identity |          | 
 40 |          | Input_Str_X12A1_Zn     |  0.00000 |  0.28892 | i  |  40,  0,  0 | Act_Identity |          | 
 41 |          | Input_Str_X12A1_ZN     |  0.00000 |  0.09875 | i  |  41,  0,  0 | Act_Identity |          | 
 42 |          | Input_Str_X12B1_CU     |  0.00000 | -0.13303 | i  |  42,  0,  0 | Act_Identity |          | 
 43 |          | Input_Str_X12B1_ZN     |  0.00000 | -0.01564 | i  |  43,  0,  0 | Act_Identity |          | 
 44 |          | Input_Str_X12B2_CD     |  0.00000 | -0.29526 | i  |  44,  0,  0 | Act_Identity |          | 
 45 |          | Input_Str_X12B2_CU     |  0.00000 |  0.22828 | i  |  45,  0,  0 | Act_Identity |          | 
 46 |          | Input_Str_X12B2_PB     |  0.00000 | -0.29962 | i  |  46,  0,  0 | Act_Identity |          | 
 47 |          | Input_Str_X12B2_ZN     |  0.00000 |  0.13066 | i  |  47,  0,  0 | Act_Identity |          | 
 48 |          | Input_Str_X12C1        |  0.00000 |  0.13117 | i  |  48,  0,  0 | Act_Identity |          | 
 49 |          | Input_Str_X12C2        |  0.00000 | -0.17412 | i  |  49,  0,  0 | Act_Identity |          | 
 50 |          | Input_Str_X13_C_FE     |  0.00000 | -0.18161 | i  |  50,  0,  0 | Act_Identity |          | 
 51 |          | Input_Str_X13_NR_AL    |  0.00000 |  0.03981 | i  |  51,  0,  0 | Act_Identity |          | 
 52 |          | Input_Str_X13_NR_FE    |  0.00000 |  0.00260 | i  |  52,  0,  0 | Act_Identity |          | 
 53 |          | Input_Str_X13_NR_MN    |  0.00000 |  0.28979 | i  |  53,  0,  0 | Act_Identity |          | 
 54 |          | Input_Str_X13A1_AL     |  0.00000 |  0.18651 | i  |  54,  0,  0 | Act_Identity |          | 
 55 |          | Input_Str_X13A1_FE     |  0.00000 | -0.25075 | i  |  55,  0,  0 | Act_Identity |          | 
 56 |          | Input_Str_X13A1_MN     |  0.00000 |  0.03059 | i  |  56,  0,  0 | Act_Identity |          | 
 57 |          | Input_Str_X13A1_SI     |  0.00000 | -0.21869 | i  |  57,  0,  0 | Act_Identity |          | 
 58 |          | Input_Str_X13B1_AL     |  0.00000 | -0.25236 | i  |  58,  0,  0 | Act_Identity |          | 
 59 |          | Input_Str_X13B1_FE     |  0.00000 |  0.08336 | i  |  59,  0,  0 | Act_Identity |          | 
 60 |          | Input_Str_X13C_C_FE    |  0.00000 | -0.28898 | i  |  60,  0,  0 | Act_Identity |          | 
 61 |          | Input_Str_X13C1_AL     |  0.00000 | -0.26924 | i  |  61,  0,  0 | Act_Identity |          | 
 62 |          | Input_Str_X13C1_FE     |  0.00000 |  0.15872 | i  |  62,  0,  0 | Act_Identity |          | 
 63 |          | Input_Str_X13C1_FE203  |  0.00000 |  0.24305 | i  |  63,  0,  0 | Act_Identity |          | 
 64 |          | Input_Str_X13C1_MN     |  0.00000 | -0.17605 | i  |  64,  0,  0 | Act_Identity |          | 
 65 |          | Input_Str_X13C1_SI     |  0.00000 |  0.28677 | i  |  65,  0,  0 | Act_Identity |          | 
 66 |          | Input_Str_X14_NR_S     |  0.00000 | -0.17459 | i  |  66,  0,  0 | Act_Identity |          | 
 67 |          | Input_Str_X140         |  0.00000 | -0.26045 | i  |  67,  0,  0 | Act_Identity |          | 
 68 |          | Input_Str_X14B1        |  0.00000 | -0.00156 | i  |  68,  0,  0 | Act_Identity |          | 
 69 |          | Input_Str_X14C1        |  0.00000 |  0.08435 | i  |  69,  0,  0 | Act_Identity |          | 
 70 |          | Input_Str_X14D1_C      |  0.00000 |  0.22726 | i  |  70,  0,  0 | Act_Identity |          | 
 71 |          | Input_Str_X14D2_BC     |  0.00000 | -0.04861 | i  |  71,  0,  0 | Act_Identity |          | 
 72 |          | Input_Str_X14F1        |  0.00000 |  0.24815 | i  |  72,  0,  0 | Act_Identity |          | 
 73 |          | Input_Str_X14H1_CA     |  0.00000 |  0.04056 | i  |  73,  0,  0 | Act_Identity |          | 
 74 |          | Input_Str_X14H1_K      |  0.00000 | -0.00980 | i  |  74,  0,  0 | Act_Identity |          | 
 75 |          | Input_Str_X14H1_MG     |  0.00000 |  0.16589 | i  |  75,  0,  0 | Act_Identity |          | 
 76 |          | Input_Str_X14H1_NA     |  0.00000 | -0.27958 | i  |  76,  0,  0 | Act_Identity |          | 
 77 |          | Input_Str_X15_BASES    |  0.00000 |  0.02071 | i  |  77,  0,  0 | Act_Identity |          | 
 78 |          | Input_Str_X15_HSK_CEC  |  0.00000 | -0.25161 | i  |  78,  0,  0 | Act_Identity |          | 
 79 |          | Input_Str_X15_NR       |  0.00000 | -0.01070 | i  |  79,  0,  0 | Act_Identity |          | 
 80 |          | Input_Str_X15_NR_AL    |  0.00000 |  0.22250 | i  |  80,  0,  0 | Act_Identity |          | 
 81 |          | Input_Str_X15_NR_BSa   |  0.00000 | -0.18861 | i  |  81,  0,  0 | Act_Identity |          | 
 82 |          | Input_Str_X15_NR_BSP   |  0.00000 |  0.24917 | i  |  82,  0,  0 | Act_Identity |          | 
 83 |          | Input_Str_X15_NR_CA    |  0.00000 |  0.25714 | i  |  83,  0,  0 | Act_Identity |          | 
 84 |          | Input_Str_X15_NR_CEC   |  0.00000 |  0.21954 | i  |  84,  0,  0 | Act_Identity |          | 
 85 |          | Input_Str_X15_NR_CMR   |  0.00000 | -0.08657 | i  |  85,  0,  0 | Act_Identity |          | 
 86 |          | Input_Str_X15_NR_ESP   |  0.00000 | -0.00947 | i  |  86,  0,  0 | Act_Identity |          | 
 87 |          | Input_Str_X15_NR_H     |  0.00000 | -0.09881 | i  |  87,  0,  0 | Act_Identity |          | 
 88 |          | Input_Str_X15_NR_K     |  0.00000 | -0.24798 | i  |  88,  0,  0 | Act_Identity |          | 
 89 |          | Input_Str_X15_NR_MG    |  0.00000 | -0.11995 | i  |  89,  0,  0 | Act_Identity |          | 
 90 |          | Input_Str_X15_NR_MN    |  0.00000 |  0.07460 | i  |  90,  0,  0 | Act_Identity |          | 
 91 |          | Input_Str_X15_NR_NA    |  0.00000 |  0.21977 | i  |  91,  0,  0 | Act_Identity |          | 
 92 |          | Input_Str_X15A1_CA     |  0.00000 | -0.23548 | i  |  92,  0,  0 | Act_Identity |          | 
 93 |          | Input_Str_X15A1_CEC    |  0.00000 | -0.16904 | i  |  93,  0,  0 | Act_Identity |          | 
 94 |          | Input_Str_X15A1_K      |  0.00000 |  0.23147 | i  |  94,  0,  0 | Act_Identity |          | 
 95 |          | Input_Str_X15A1_MG     |  0.00000 | -0.08637 | i  |  95,  0,  0 | Act_Identity |          | 
 96 |          | Input_Str_X15A1_MN     |  0.00000 |  0.11919 | i  |  96,  0,  0 | Act_Identity |          | 
 97 |          | Input_Str_X15A1_NA     |  0.00000 | -0.14328 | i  |  97,  0,  0 | Act_Identity |          | 
 98 |          | Input_Str_X15A2_CA     |  0.00000 |  0.15249 | i  |  98,  0,  0 | Act_Identity |          | 
 99 |          | Input_Str_X15A2_CEC    |  0.00000 |  0.16314 | i  |  99,  0,  0 | Act_Identity |          | 
100 |          | Input_Str_X15A2_K      |  0.00000 | -0.29305 | i  | 100,  0,  0 | Act_Identity |          | 
101 |          | Input_Str_X15A2_MG     |  0.00000 |  0.02862 | i  | 101,  0,  0 | Act_Identity |          | 
102 |          | Input_Str_X15A2_NA     |  0.00000 | -0.11867 | i  | 102,  0,  0 | Act_Identity |          | 
103 |          | Input_Str_X15A3_NA     |  0.00000 |  0.25770 | i  | 103,  0,  0 | Act_Identity |          | 
104 |          | Input_Str_X15B1_CA     |  0.00000 |  0.10375 | i  | 104,  0,  0 | Act_Identity |          | 
105 |          | Input_Str_X15B1_CEC    |  0.00000 | -0.06241 | i  | 105,  0,  0 | Act_Identity |          | 
106 |          | Input_Str_X15B1_K      |  0.00000 | -0.21384 | i  | 106,  0,  0 | Act_Identity |          | 
107 |          | Input_Str_X15B1_MG     |  0.00000 | -0.00269 | i  | 107,  0,  0 | Act_Identity |          | 
108 |          | Input_Str_X15B1_NA     |  0.00000 |  0.14918 | i  | 108,  0,  0 | Act_Identity |          | 
109 |          | Input_Str_X15B2_CA     |  0.00000 |  0.28288 | i  | 109,  0,  0 | Act_Identity |          | 
110 |          | Input_Str_X15B2_CEC    |  0.00000 | -0.13351 | i  | 110,  0,  0 | Act_Identity |          | 
111 |          | Input_Str_X15B2_K      |  0.00000 |  0.11066 | i  | 111,  0,  0 | Act_Identity |          | 
112 |          | Input_Str_X15B2_MG     |  0.00000 |  0.01357 | i  | 112,  0,  0 | Act_Identity |          | 
113 |          | Input_Str_X15B2_NA     |  0.00000 | -0.15645 | i  | 113,  0,  0 | Act_Identity |          | 
114 |          | Input_Str_X15C1_CA     |  0.00000 |  0.04274 | i  | 114,  0,  0 | Act_Identity |          | 
115 |          | Input_Str_X15C1_CEC    |  0.00000 | -0.20298 | i  | 115,  0,  0 | Act_Identity |          | 
116 |          | Input_Str_X15C1_K      |  0.00000 | -0.16222 | i  | 116,  0,  0 | Act_Identity |          | 
117 |          | Input_Str_X15C1_MG     |  0.00000 |  0.23145 | i  | 117,  0,  0 | Act_Identity |          | 
118 |          | Input_Str_X15C1_NA     |  0.00000 | -0.05436 | i  | 118,  0,  0 | Act_Identity |          | 
119 |          | Input_Str_X15C1mod_CA  |  0.00000 | -0.12146 | i  | 119,  0,  0 | Act_Identity |          | 
120 |          | Input_Str_X15C1mod_K   |  0.00000 |  0.15511 | i  | 120,  0,  0 | Act_Identity |          | 
121 |          | Input_Str_X15C1mod_MG  |  0.00000 |  0.24041 | i  | 121,  0,  0 | Act_Identity |          | 
122 |          | Input_Str_X15C1mod_NA  |  0.00000 | -0.09911 | i  | 122,  0,  0 | Act_Identity |          | 
123 |          | Input_Str_X15C1modCEC  |  0.00000 | -0.01750 | i  | 123,  0,  0 | Act_Identity |          | 
124 |          | Input_Str_X15D1_AL     |  0.00000 | -0.06458 | i  | 124,  0,  0 | Act_Identity |          | 
125 |          | Input_Str_X15D1_CA     |  0.00000 | -0.18594 | i  | 125,  0,  0 | Act_Identity |          | 
126 |          | Input_Str_X15D1_CEC    |  0.00000 |  0.11188 | i  | 126,  0,  0 | Act_Identity |          | 
127 |          | Input_Str_X15D1_K      |  0.00000 | -0.09905 | i  | 127,  0,  0 | Act_Identity |          | 
128 |          | Input_Str_X15D1_MG     |  0.00000 |  0.21690 | i  | 128,  0,  0 | Act_Identity |          | 
129 |          | Input_Str_X15D1_NA     |  0.00000 |  0.01234 | i  | 129,  0,  0 | Act_Identity |          | 
130 |          | Input_Str_X15D2_CA     |  0.00000 |  0.29113 | i  | 130,  0,  0 | Act_Identity |          | 
131 |          | Input_Str_X15D2_CEC    |  0.00000 |  0.09137 | i  | 131,  0,  0 | Act_Identity |          | 
132 |          | Input_Str_X15D2_K      |  0.00000 | -0.28766 | i  | 132,  0,  0 | Act_Identity |          | 
133 |          | Input_Str_X15D2_MG     |  0.00000 |  0.00844 | i  | 133,  0,  0 | Act_Identity |          | 
134 |          | Input_Str_X15D2_NA     |  0.00000 | -0.03936 | i  | 134,  0,  0 | Act_Identity |          | 
135 |          | Input_Str_X15D3_CA     |  0.00000 |  0.16221 | i  | 135,  0,  0 | Act_Identity |          | 
136 |          | Input_Str_X15D3_CEC    |  0.00000 | -0.01487 | i  | 136,  0,  0 | Act_Identity |          | 
137 |          | Input_Str_X15D3_K      |  0.00000 | -0.16312 | i  | 137,  0,  0 | Act_Identity |          | 
138 |          | Input_Str_X15D3_MG     |  0.00000 |  0.22327 | i  | 138,  0,  0 | Act_Identity |          | 
139 |          | Input_Str_X15D3_NA     |  0.00000 | -0.11933 | i  | 139,  0,  0 | Act_Identity |          | 
140 |          | Input_Str_X15E1_AL     |  0.00000 |  0.02806 | i  | 140,  0,  0 | Act_Identity |          | 
141 |          | Input_Str_X15E1_CA     |  0.00000 |  0.08162 | i  | 141,  0,  0 | Act_Identity |          | 
142 |          | Input_Str_X15E1_CEC    |  0.00000 | -0.13127 | i  | 142,  0,  0 | Act_Identity |          | 
143 |          | Input_Str_X15E1_H      |  0.00000 |  0.12627 | i  | 143,  0,  0 | Act_Identity |          | 
144 |          | Input_Str_X15E1_K      |  0.00000 |  0.20116 | i  | 144,  0,  0 | Act_Identity |          | 
145 |          | Input_Str_X15E1_MG     |  0.00000 | -0.13104 | i  | 145,  0,  0 | Act_Identity |          | 
146 |          | Input_Str_X15E1_MN     |  0.00000 | -0.09538 | i  | 146,  0,  0 | Act_Identity |          | 
147 |          | Input_Str_X15E1_NA     |  0.00000 | -0.05283 | i  | 147,  0,  0 | Act_Identity |          | 
148 |          | Input_Str_X15E1mod_AL  |  0.00000 |  0.15236 | i  | 148,  0,  0 | Act_Identity |          | 
149 |          | Input_Str_X15E1mod_CA  |  0.00000 |  0.23263 | i  | 149,  0,  0 | Act_Identity |          | 
150 |          | Input_Str_X15E1mod_K   |  0.00000 |  0.28673 | i  | 150,  0,  0 | Act_Identity |          | 
151 |          | Input_Str_X15E1mod_MG  |  0.00000 |  0.29932 | i  | 151,  0,  0 | Act_Identity |          | 
152 |          | Input_Str_X15E1mod_MN  |  0.00000 | -0.08221 | i  | 152,  0,  0 | Act_Identity |          | 
153 |          | Input_Str_X15E1mod_NA  |  0.00000 |  0.23471 | i  | 153,  0,  0 | Act_Identity |          | 
154 |          | Input_Str_X15E2_CA     |  0.00000 |  0.23419 | i  | 154,  0,  0 | Act_Identity |          | 
155 |          | Input_Str_X15E2_K      |  0.00000 |  0.23305 | i  | 155,  0,  0 | Act_Identity |          | 
156 |          | Input_Str_X15E2_MG     |  0.00000 | -0.26598 | i  | 156,  0,  0 | Act_Identity |          | 
157 |          | Input_Str_X15E2_NA     |  0.00000 |  0.28838 | i  | 157,  0,  0 | Act_Identity |          | 
158 |          | Input_Str_X15E2mod_AL  |  0.00000 | -0.06615 | i  | 158,  0,  0 | Act_Identity |          | 
159 |          | Input_Str_X15E2mod_CA  |  0.00000 | -0.20410 | i  | 159,  0,  0 | Act_Identity |          | 
160 |          | Input_Str_X15E2mod_K   |  0.00000 | -0.25408 | i  | 160,  0,  0 | Act_Identity |          | 
161 |          | Input_Str_X15E2mod_MG  |  0.00000 | -0.26174 | i  | 161,  0,  0 | Act_Identity |          | 
162 |          | Input_Str_X15E2mod_MN  |  0.00000 |  0.14709 | i  | 162,  0,  0 | Act_Identity |          | 
163 |          | Input_Str_X15E2mod_NA  |  0.00000 |  0.22070 | i  | 163,  0,  0 | Act_Identity |          | 
164 |          | Input_Str_X15F1_CA     |  0.00000 | -0.09290 | i  | 164,  0,  0 | Act_Identity |          | 
165 |          | Input_Str_X15F1_CEC    |  0.00000 | -0.12831 | i  | 165,  0,  0 | Act_Identity |          | 
166 |          | Input_Str_X15F1_K      |  0.00000 | -0.04783 | i  | 166,  0,  0 | Act_Identity |          | 
167 |          | Input_Str_X15F1_MG     |  0.00000 |  0.11063 | i  | 167,  0,  0 | Act_Identity |          | 
168 |          | Input_Str_X15F1_NA     |  0.00000 | -0.05910 | i  | 168,  0,  0 | Act_Identity |          | 
169 |          | Input_Str_X15F2        |  0.00000 |  0.17137 | i  | 169,  0,  0 | Act_Identity |          | 
170 |          | Input_Str_X15F2_AL     |  0.00000 | -0.18194 | i  | 170,  0,  0 | Act_Identity |          | 
171 |          | Input_Str_X15F3        |  0.00000 | -0.02972 | i  | 171,  0,  0 | Act_Identity |          | 
172 |          | Input_Str_X15F4        |  0.00000 | -0.27373 | i  | 172,  0,  0 | Act_Identity |          | 
173 |          | Input_Str_X15G_C       |  0.00000 | -0.16739 | i  | 173,  0,  0 | Act_Identity |          | 
174 |          | Input_Str_X15G_C_AL1   |  0.00000 |  0.02380 | i  | 174,  0,  0 | Act_Identity |          | 
175 |          | Input_Str_X15G_C_AL2   |  0.00000 |  0.15656 | i  | 175,  0,  0 | Act_Identity |          | 
176 |          | Input_Str_X15G_C_H1    |  0.00000 |  0.00338 | i  | 176,  0,  0 | Act_Identity |          | 
177 |          | Input_Str_X15G_D       |  0.00000 | -0.10182 | i  | 177,  0,  0 | Act_Identity |          | 
178 |          | Input_Str_X15G_H       |  0.00000 |  0.10664 | i  | 178,  0,  0 | Act_Identity |          | 
179 |          | Input_Str_X15G1        |  0.00000 |  0.05755 | i  | 179,  0,  0 | Act_Identity |          | 
180 |          | Input_Str_X15G1_AL     |  0.00000 |  0.15477 | i  | 180,  0,  0 | Act_Identity |          | 
181 |          | Input_Str_X15G1_H      |  0.00000 | -0.17503 | i  | 181,  0,  0 | Act_Identity |          | 
182 |          | Input_Str_X15I2        |  0.00000 | -0.11098 | i  | 182,  0,  0 | Act_Identity |          | 
183 |          | Input_Str_X15I3        |  0.00000 | -0.21505 | i  | 183,  0,  0 | Act_Identity |          | 
184 |          | Input_Str_X15I4        |  0.00000 |  0.19470 | i  | 184,  0,  0 | Act_Identity |          | 
185 |          | Input_Str_X15J_BASES   |  0.00000 |  0.28933 | i  | 185,  0,  0 | Act_Identity |          | 
186 |          | Input_Str_X15J_C       |  0.00000 |  0.22366 | i  | 186,  0,  0 | Act_Identity |          | 
187 |          | Input_Str_X15J_H       |  0.00000 | -0.09023 | i  | 187,  0,  0 | Act_Identity |          | 
188 |          | Input_Str_X15J1        |  0.00000 |  0.03928 | i  | 188,  0,  0 | Act_Identity |          | 
189 |          | Input_Str_X15J2_MCLW   |  0.00000 |  0.12399 | i  | 189,  0,  0 | Act_Identity |          | 
190 |          | Input_Str_X15K1        |  0.00000 | -0.18760 | i  | 190,  0,  0 | Act_Identity |          | 
191 |          | Input_Str_X15L1        |  0.00000 | -0.18182 | i  | 191,  0,  0 | Act_Identity |          | 
192 |          | Input_Str_X15L1_a      |  0.00000 |  0.11540 | i  | 192,  0,  0 | Act_Identity |          | 
193 |          | Input_Str_X15M1_CMR    |  0.00000 | -0.13689 | i  | 193,  0,  0 | Act_Identity |          | 
194 |          | Input_Str_X15M1_K.Mg   |  0.00000 |  0.00844 | i  | 194,  0,  0 | Act_Identity |          | 
195 |          | Input_Str_X15M1AlECEC  |  0.00000 | -0.09809 | i  | 195,  0,  0 | Act_Identity |          | 
196 |          | Input_Str_X15M1CaCEC   |  0.00000 |  0.21286 | i  | 196,  0,  0 | Act_Identity |          | 
197 |          | Input_Str_X15M1CaECEC  |  0.00000 |  0.17888 | i  | 197,  0,  0 | Act_Identity |          | 
198 |          | Input_Str_X15M1KCEC    |  0.00000 | -0.18177 | i  | 198,  0,  0 | Act_Identity |          | 
199 |          | Input_Str_X15M1KECEC   |  0.00000 |  0.10506 | i  | 199,  0,  0 | Act_Identity |          | 
200 |          | Input_Str_X15M1MgCEC   |  0.00000 |  0.25306 | i  | 200,  0,  0 | Act_Identity |          | 
201 |          | Input_Str_X15M1MgECEC  |  0.00000 |  0.25435 | i  | 201,  0,  0 | Act_Identity |          | 
202 |          | Input_Str_X15N1        |  0.00000 | -0.13708 | i  | 202,  0,  0 | Act_Identity |          | 
203 |          | Input_Str_X15N1_a      |  0.00000 | -0.22411 | i  | 203,  0,  0 | Act_Identity |          | 
204 |          | Input_Str_X15N1_b      |  0.00000 |  0.03913 | i  | 204,  0,  0 | Act_Identity |          | 
205 |          | Input_Str_X15O1        |  0.00000 | -0.22446 | i  | 205,  0,  0 | Act_Identity |          | 
206 |          | Input_Str_X17A_HF.     |  0.00000 |  0.14517 | i  | 206,  0,  0 | Act_Identity |          | 
207 |          | Input_Str_X17A_NR      |  0.00000 |  0.19779 | i  | 207,  0,  0 | Act_Identity |          | 
208 |          | Input_Str_X17A1        |  0.00000 | -0.01464 | i  | 208,  0,  0 | Act_Identity |          | 
209 |          | Input_Str_X17A3_CA     |  0.00000 |  0.24797 | i  | 209,  0,  0 | Act_Identity |          | 
210 |          | Input_Str_X17A3_MG     |  0.00000 |  0.15194 | i  | 210,  0,  0 | Act_Identity |          | 
211 |          | Input_Str_X17A3_NA     |  0.00000 | -0.01425 | i  | 211,  0,  0 | Act_Identity |          | 
212 |          | Input_Str_X17A3_S      |  0.00000 |  0.24215 | i  | 212,  0,  0 | Act_Identity |          | 
213 |          | Input_Str_X17D1_CR     |  0.00000 |  0.23330 | i  | 213,  0,  0 | Act_Identity |          | 
214 |          | Input_Str_X17D1_CU     |  0.00000 | -0.18390 | i  | 214,  0,  0 | Act_Identity |          | 
215 |          | Input_Str_X17D1_FE     |  0.00000 | -0.01097 | i  | 215,  0,  0 | Act_Identity |          | 
216 |          | Input_Str_X17D1_MN     |  0.00000 |  0.24501 | i  | 216,  0,  0 | Act_Identity |          | 
217 |          | Input_Str_X17D1_NI     |  0.00000 |  0.08205 | i  | 217,  0,  0 | Act_Identity |          | 
218 |          | Input_Str_X17D1_PB     |  0.00000 |  0.00750 | i  | 218,  0,  0 | Act_Identity |          | 
219 |          | Input_Str_X17D1_ZN     |  0.00000 | -0.13770 | i  | 219,  0,  0 | Act_Identity |          | 
220 |          | Input_Str_X18_NR       |  0.00000 | -0.08164 | i  | 220,  0,  0 | Act_Identity |          | 
221 |          | Input_Str_X18_NR_K     |  0.00000 |  0.13702 | i  | 221,  0,  0 | Act_Identity |          | 
222 |          | Input_Str_X18A1        |  0.00000 | -0.04826 | i  | 222,  0,  0 | Act_Identity |          | 
223 |          | Input_Str_X18A1_NR     |  0.00000 | -0.17387 | i  | 223,  0,  0 | Act_Identity |          | 
224 |          | Input_Str_X18A1mod     |  0.00000 | -0.16182 | i  | 224,  0,  0 | Act_Identity |          | 
225 |          | Input_Str_X18B1        |  0.00000 | -0.12729 | i  | 225,  0,  0 | Act_Identity |          | 
226 |          | Input_Str_X18B2        |  0.00000 | -0.03864 | i  | 226,  0,  0 | Act_Identity |          | 
227 |          | Input_Str_X18F1_Al     |  0.00000 |  0.07971 | i  | 227,  0,  0 | Act_Identity |          | 
228 |          | Input_Str_X18F1_AL     |  0.00000 | -0.22650 | i  | 228,  0,  0 | Act_Identity |          | 
229 |          | Input_Str_X18F1_As     |  0.00000 |  0.25112 | i  | 229,  0,  0 | Act_Identity |          | 
230 |          | Input_Str_X18F1_AS     |  0.00000 |  0.13628 | i  | 230,  0,  0 | Act_Identity |          | 
231 |          | Input_Str_X18F1_B      |  0.00000 |  0.24618 | i  | 231,  0,  0 | Act_Identity |          | 
232 |          | Input_Str_X18F1_Ca     |  0.00000 |  0.17449 | i  | 232,  0,  0 | Act_Identity |          | 
233 |          | Input_Str_X18F1_CA     |  0.00000 |  0.04738 | i  | 233,  0,  0 | Act_Identity |          | 
234 |          | Input_Str_X18F1_Cd     |  0.00000 |  0.23713 | i  | 234,  0,  0 | Act_Identity |          | 
235 |          | Input_Str_X18F1_CD     |  0.00000 | -0.15337 | i  | 235,  0,  0 | Act_Identity |          | 
236 |          | Input_Str_X18F1_Co     |  0.00000 |  0.08494 | i  | 236,  0,  0 | Act_Identity |          | 
237 |          | Input_Str_X18F1_CO     |  0.00000 | -0.26108 | i  | 237,  0,  0 | Act_Identity |          | 
238 |          | Input_Str_X18F1_Cu     |  0.00000 | -0.10676 | i  | 238,  0,  0 | Act_Identity |          | 
239 |          | Input_Str_X18F1_CU     |  0.00000 |  0.16444 | i  | 239,  0,  0 | Act_Identity |          | 
240 |          | Input_Str_X18F1_Fe     |  0.00000 |  0.01832 | i  | 240,  0,  0 | Act_Identity |          | 
241 |          | Input_Str_X18F1_FE     |  0.00000 |  0.17879 | i  | 241,  0,  0 | Act_Identity |          | 
242 |          | Input_Str_X18F1_K      |  0.00000 | -0.26541 | i  | 242,  0,  0 | Act_Identity |          | 
243 |          | Input_Str_X18F1_Mg     |  0.00000 | -0.22378 | i  | 243,  0,  0 | Act_Identity |          | 
244 |          | Input_Str_X18F1_MG     |  0.00000 |  0.21085 | i  | 244,  0,  0 | Act_Identity |          | 
245 |          | Input_Str_X18F1_Mn     |  0.00000 |  0.21126 | i  | 245,  0,  0 | Act_Identity |          | 
246 |          | Input_Str_X18F1_MN     |  0.00000 |  0.02897 | i  | 246,  0,  0 | Act_Identity |          | 
247 |          | Input_Str_X18F1_Mo     |  0.00000 |  0.09827 | i  | 247,  0,  0 | Act_Identity |          | 
248 |          | Input_Str_X18F1_MO     |  0.00000 | -0.13311 | i  | 248,  0,  0 | Act_Identity |          | 
249 |          | Input_Str_X18F1_Na     |  0.00000 |  0.15564 | i  | 249,  0,  0 | Act_Identity |          | 
250 |          | Input_Str_X18F1_NA     |  0.00000 | -0.07242 | i  | 250,  0,  0 | Act_Identity |          | 
251 |          | Input_Str_X18F1_Ni     |  0.00000 | -0.19884 | i  | 251,  0,  0 | Act_Identity |          | 
252 |          | Input_Str_X18F1_NI     |  0.00000 | -0.10409 | i  | 252,  0,  0 | Act_Identity |          | 
253 |          | Input_Str_X18F1_P      |  0.00000 |  0.15706 | i  | 253,  0,  0 | Act_Identity |          | 
254 |          | Input_Str_X18F1_Pb     |  0.00000 |  0.28992 | i  | 254,  0,  0 | Act_Identity |          | 
255 |          | Input_Str_X18F1_PB     |  0.00000 |  0.16379 | i  | 255,  0,  0 | Act_Identity |          | 
256 |          | Input_Str_X18F1_S      |  0.00000 | -0.27452 | i  | 256,  0,  0 | Act_Identity |          | 
257 |          | Input_Str_X18F1_Se     |  0.00000 | -0.13123 | i  | 257,  0,  0 | Act_Identity |          | 
258 |          | Input_Str_X18F1_SE     |  0.00000 | -0.10327 | i  | 258,  0,  0 | Act_Identity |          | 
259 |          | Input_Str_X18F1_Zn     |  0.00000 | -0.25151 | i  | 259,  0,  0 | Act_Identity |          | 
260 |          | Input_Str_X18F1_ZN     |  0.00000 |  0.12229 | i  | 260,  0,  0 | Act_Identity |          | 
261 |          | Input_Str_X18I1_CA     |  0.00000 |  0.24555 | i  | 261,  0,  0 | Act_Identity |          | 
262 |          | Input_Str_X18I1_MG     |  0.00000 | -0.27229 | i  | 262,  0,  0 | Act_Identity |          | 
263 |          | Input_Str_X18I1_NA     |  0.00000 |  0.22797 | i  | 263,  0,  0 | Act_Identity |          | 
264 |          | Input_Str_X18I1_S      |  0.00000 |  0.26858 | i  | 264,  0,  0 | Act_Identity |          | 
265 |          | Input_Str_X19_COL      |  0.00000 | -0.07271 | i  | 265,  0,  0 | Act_Identity |          | 
266 |          | Input_Str_X19A1        |  0.00000 | -0.15462 | i  | 266,  0,  0 | Act_Identity |          | 
267 |          | Input_Str_X19B_NR      |  0.00000 |  0.10855 | i  | 267,  0,  0 | Act_Identity |          | 
268 |          | Input_Str_X19B1        |  0.00000 |  0.01496 | i  | 268,  0,  0 | Act_Identity |          | 
269 |          | Input_Str_X19B2        |  0.00000 |  0.16468 | i  | 269,  0,  0 | Act_Identity |          | 
270 |          | Input_Str_X19F1        |  0.00000 |  0.24543 | i  | 270,  0,  0 | Act_Identity |          | 
271 |          | Input_Str_X19F1b       |  0.00000 |  0.13452 | i  | 271,  0,  0 | Act_Identity |          | 
272 |          | Input_Str_X2.00E.01    |  0.00000 |  0.14608 | i  | 272,  0,  0 | Act_Identity |          | 
273 |          | Input_Str_X2.00E.02    |  0.00000 |  0.00856 | i  | 273,  0,  0 | Act_Identity |          | 
274 |          | Input_Str_X2_LOI       |  0.00000 |  0.12818 | i  | 274,  0,  0 | Act_Identity |          | 
275 |          | Input_Str_X2A1         |  0.00000 |  0.18787 | i  | 275,  0,  0 | Act_Identity |          | 
276 |          | Input_Str_X2D1         |  0.00000 | -0.16172 | i  | 276,  0,  0 | Act_Identity |          | 
277 |          | Input_Str_X2Z1_R1      |  0.00000 | -0.02057 | i  | 277,  0,  0 | Act_Identity |          | 
278 |          | Input_Str_X2Z1_R2      |  0.00000 |  0.26428 | i  | 278,  0,  0 | Act_Identity |          | 
279 |          | Input_Str_X2Z2_C       |  0.00000 |  0.29825 | i  | 279,  0,  0 | Act_Identity |          | 
280 |          | Input_Str_X2Z2_CLAY    |  0.00000 | -0.09010 | i  | 280,  0,  0 | Act_Identity |          | 
281 |          | Input_Str_X2Z2_CS      |  0.00000 |  0.13086 | i  | 281,  0,  0 | Act_Identity |          | 
282 |          | Input_Str_X2Z2_FS      |  0.00000 |  0.14598 | i  | 282,  0,  0 | Act_Identity |          | 
283 |          | Input_Str_X2Z2_S       |  0.00000 |  0.00456 | i  | 283,  0,  0 | Act_Identity |          | 
284 |          | Input_Str_X2Z2_Z       |  0.00000 | -0.04798 | i  | 284,  0,  0 | Act_Identity |          | 
285 |          | Input_Str_X3_C_B       |  0.00000 | -0.16611 | i  | 285,  0,  0 | Act_Identity |          | 
286 |          | Input_Str_X3_NR        |  0.00000 | -0.16652 | i  | 286,  0,  0 | Act_Identity |          | 
287 |          | Input_Str_X3A_C_2.5    |  0.00000 |  0.05695 | i  | 287,  0,  0 | Act_Identity |          | 
288 |          | Input_Str_X3A_TSS      |  0.00000 |  0.24836 | i  | 288,  0,  0 | Act_Identity |          | 
289 |          | Input_Str_X3A1         |  0.00074 |  0.20143 | i  | 289,  0,  0 | Act_Identity |          | 
290 |          | Input_Str_X4_NR        |  0.00000 | -0.23983 | i  | 290,  0,  0 | Act_Identity |          | 
291 |          | Input_Str_X4A_C_1      |  0.00000 | -0.13716 | i  | 291,  0,  0 | Act_Identity |          | 
292 |          | Input_Str_X4A_C_2.5    |  0.00000 |  0.24128 | i  | 292,  0,  0 | Act_Identity |          | 
293 |          | Input_Str_X4A1         |  0.06600 |  0.12209 | i  | 293,  0,  0 | Act_Identity |          | 
294 |          | Input_Str_X4A1_MCLW    |  0.00000 |  0.04535 | i  | 294,  0,  0 | Act_Identity |          | 
295 |          | Input_Str_X4B_AL       |  0.00000 | -0.26954 | i  | 295,  0,  0 | Act_Identity |          | 
296 |          | Input_Str_X4B_AL_NR    |  0.00000 |  0.23837 | i  | 296,  0,  0 | Act_Identity |          | 
297 |          | Input_Str_X4B_C_2.5    |  0.00000 |  0.09999 | i  | 297,  0,  0 | Act_Identity |          | 
298 |          | Input_Str_X4B1         |  0.43279 | -0.07656 | i  | 298,  0,  0 | Act_Identity |          | 
299 |          | Input_Str_X4B2         |  0.00000 |  0.19046 | i  | 299,  0,  0 | Act_Identity |          | 
300 |          | Input_Str_X4B4         |  0.00000 |  0.10145 | i  | 300,  0,  0 | Act_Identity |          | 
301 |          | Input_Str_X4B5_MCLW    |  0.00000 | -0.28426 | i  | 301,  0,  0 | Act_Identity |          | 
302 |          | Input_Str_X4C_C_1      |  0.00000 |  0.14832 | i  | 302,  0,  0 | Act_Identity |          | 
303 |          | Input_Str_X4C1         |  0.00000 |  0.18173 | i  | 303,  0,  0 | Act_Identity |          | 
304 |          | Input_Str_X4G_NR       |  0.00000 |  0.19335 | i  | 304,  0,  0 | Act_Identity |          | 
305 |          | Input_Str_X5_C_B       |  0.00000 |  0.16698 | i  | 305,  0,  0 | Act_Identity |          | 
306 |          | Input_Str_X5_NR        |  0.00000 |  0.00593 | i  | 306,  0,  0 | Act_Identity |          | 
307 |          | Input_Str_X5A_C_2.5    |  0.00000 | -0.19869 | i  | 307,  0,  0 | Act_Identity |          | 
308 |          | Input_Str_X5A_NR       |  0.18092 | -0.00582 | i  | 308,  0,  0 | Act_Identity |          | 
309 |          | Input_Str_X5A1         |  0.00000 | -0.29797 | i  | 309,  0,  0 | Act_Identity |          | 
310 |          | Input_Str_X5A2         |  0.00000 | -0.23626 | i  | 310,  0,  0 | Act_Identity |          | 
311 |          | Input_Str_X5A2b        |  0.00000 |  0.06984 | i  | 311,  0,  0 | Act_Identity |          | 
312 |          | Input_Str_X6_DC        |  0.00000 |  0.00562 | i  | 312,  0,  0 | Act_Identity |          | 
313 |          | Input_Str_X6A1         |  0.00000 | -0.15119 | i  | 313,  0,  0 | Act_Identity |          | 
314 |          | Input_Str_X6A1_UC      |  0.00000 |  0.09990 | i  | 314,  0,  0 | Act_Identity |          | 
315 |          | Input_Str_X6B1         |  0.00000 |  0.22774 | i  | 315,  0,  0 | Act_Identity |          | 
316 |          | Input_Str_X6B2         |  0.00000 | -0.27342 | i  | 316,  0,  0 | Act_Identity |          | 
317 |          | Input_Str_X6B2b        |  0.00000 |  0.24611 | i  | 317,  0,  0 | Act_Identity |          | 
318 |          | Input_Str_X6B3         |  0.00000 | -0.14571 | i  | 318,  0,  0 | Act_Identity |          | 
319 |          | Input_Str_X6B3a        |  0.00000 |  0.06527 | i  | 319,  0,  0 | Act_Identity |          | 
320 |          | Input_Str_X6B3b        |  0.00000 |  0.17293 | i  | 320,  0,  0 | Act_Identity |          | 
321 |          | Input_Str_X6B4_0_30    |  0.00000 | -0.22618 | i  | 321,  0,  0 | Act_Identity |          | 
322 |          | Input_Str_X6B4_30_100  |  0.00000 | -0.10631 | i  | 322,  0,  0 | Act_Identity |          | 
323 |          | Input_Str_X6H1_HOC     |  0.00000 |  0.17875 | i  | 323,  0,  0 | Act_Identity |          | 
324 |          | Input_Str_X6H1_POC     |  0.00000 |  0.09509 | i  | 324,  0,  0 | Act_Identity |          | 
325 |          | Input_Str_X6H1_ROC     |  0.00000 | -0.19154 | i  | 325,  0,  0 | Act_Identity |          | 
326 |          | Input_Str_X6H1_TOC     |  0.00000 | -0.00267 | i  | 326,  0,  0 | Act_Identity |          | 
327 |          | Input_Str_X6H2a        |  0.00000 | -0.14743 | i  | 327,  0,  0 | Act_Identity |          | 
328 |          | Input_Str_X6H2b        |  0.00000 |  0.15428 | i  | 328,  0,  0 | Act_Identity |          | 
329 |          | Input_Str_X6H2c        |  0.00000 |  0.15885 | i  | 329,  0,  0 | Act_Identity |          | 
330 |          | Input_Str_X6H3         |  0.00000 |  0.09468 | i  | 330,  0,  0 | Act_Identity |          | 
331 |          | Input_Str_X6H3_0_30    |  0.00000 | -0.05155 | i  | 331,  0,  0 | Act_Identity |          | 
332 |          | Input_Str_X6H3_30_100  |  0.00000 | -0.18413 | i  | 332,  0,  0 | Act_Identity |          | 
333 |          | Input_Str_X6Z          |  0.00000 | -0.16155 | i  | 333,  0,  0 | Act_Identity |          | 
334 |          | Input_Str_X7_C_B       |  0.00000 |  0.04257 | i  | 334,  0,  0 | Act_Identity |          | 
335 |          | Input_Str_X7_NR        |  0.00000 |  0.19296 | i  | 335,  0,  0 | Act_Identity |          | 
336 |          | Input_Str_X7A1         |  0.00000 |  0.25130 | i  | 336,  0,  0 | Act_Identity |          | 
337 |          | Input_Str_X7A2         |  0.00000 | -0.23645 | i  | 337,  0,  0 | Act_Identity |          | 
338 |          | Input_Str_X7A2a        |  0.00000 | -0.29663 | i  | 338,  0,  0 | Act_Identity |          | 
339 |          | Input_Str_X7A5         |  0.00000 | -0.02956 | i  | 339,  0,  0 | Act_Identity |          | 
340 |          | Input_Str_X7A6b_MCLW   |  0.00000 |  0.13405 | i  | 340,  0,  0 | Act_Identity |          | 
341 |          | Input_Str_X7B1         |  0.00000 | -0.13943 | i  | 341,  0,  0 | Act_Identity |          | 
342 |          | Input_Str_X7C_1MKCla   |  0.00000 |  0.10766 | i  | 342,  0,  0 | Act_Identity |          | 
343 |          | Input_Str_X7C_1MKClb   |  0.00000 |  0.16769 | i  | 343,  0,  0 | Act_Identity |          | 
344 |          | Input_Str_X7C_CASO4    |  0.00000 |  0.01157 | i  | 344,  0,  0 | Act_Identity |          | 
345 |          | Input_Str_X7C1         |  0.00000 | -0.16012 | i  | 345,  0,  0 | Act_Identity |          | 
346 |          | Input_Str_X7C1a        |  0.00000 |  0.11384 | i  | 346,  0,  0 | Act_Identity |          | 
347 |          | Input_Str_X7C1b        |  0.00000 | -0.19148 | i  | 347,  0,  0 | Act_Identity |          | 
348 |          | Input_Str_X7C1d        |  0.00000 | -0.17012 | i  | 348,  0,  0 | Act_Identity |          | 
349 |          | Input_Str_X7C1e        |  0.00000 |  0.10239 | i  | 349,  0,  0 | Act_Identity |          | 
350 |          | Input_Str_X7C2b        |  0.00000 | -0.19299 | i  | 350,  0,  0 | Act_Identity |          | 
351 |          | Input_Str_X7C2b_NH4    |  0.00000 | -0.17238 | i  | 351,  0,  0 | Act_Identity |          | 
352 |          | Input_Str_X7C2b_NO3    |  0.00000 |  0.24135 | i  | 352,  0,  0 | Act_Identity |          | 
353 |          | Input_Str_X7D1a        |  0.00000 |  0.10899 | i  | 353,  0,  0 | Act_Identity |          | 
354 |          | Input_Str_X7E1a        |  0.00000 | -0.05777 | i  | 354,  0,  0 | Act_Identity |          | 
355 |          | Input_Str_X7E1b        |  0.00000 | -0.05967 | i  | 355,  0,  0 | Act_Identity |          | 
356 |          | Input_Str_X8A1         |  0.00000 |  0.12040 | i  | 356,  0,  0 | Act_Identity |          | 
357 |          | Input_Str_X9.00E.02    |  0.00000 | -0.07461 | i  | 357,  0,  0 | Act_Identity |          | 
358 |          | Input_Str_X9_E_NR      |  0.00000 | -0.00632 | i  | 358,  0,  0 | Act_Identity |          | 
359 |          | Input_Str_X9_NR        |  0.00000 | -0.11431 | i  | 359,  0,  0 | Act_Identity |          | 
360 |          | Input_Str_X9A_HCL      |  0.00000 |  0.09883 | i  | 360,  0,  0 | Act_Identity |          | 
361 |          | Input_Str_X9A_HCLP2O5  |  0.00000 | -0.17929 | i  | 361,  0,  0 | Act_Identity |          | 
362 |          | Input_Str_X9A_HF.      |  0.00000 | -0.14511 | i  | 362,  0,  0 | Act_Identity |          | 
363 |          | Input_Str_X9A_NR       |  0.00000 |  0.15594 | i  | 363,  0,  0 | Act_Identity |          | 
364 |          | Input_Str_X9A_S14      |  0.00000 |  0.02702 | i  | 364,  0,  0 | Act_Identity |          | 
365 |          | Input_Str_X9A1         |  0.00000 | -0.07142 | i  | 365,  0,  0 | Act_Identity |          | 
366 |          | Input_Str_X9A3         |  0.00000 |  0.15612 | i  | 366,  0,  0 | Act_Identity |          | 
367 |          | Input_Str_X9A3a        |  0.00000 |  0.11863 | i  | 367,  0,  0 | Act_Identity |          | 
368 |          | Input_Str_X9B          |  0.00000 | -0.02621 | i  | 368,  0,  0 | Act_Identity |          | 
369 |          | Input_Str_X9B_9C       |  0.00000 | -0.03746 | i  | 369,  0,  0 | Act_Identity |          | 
370 |          | Input_Str_X9B_NR       |  0.00000 |  0.18736 | i  | 370,  0,  0 | Act_Identity |          | 
371 |          | Input_Str_X9B1         |  0.00000 | -0.00874 | i  | 371,  0,  0 | Act_Identity |          | 
372 |          | Input_Str_X9B2         |  0.00000 | -0.09678 | i  | 372,  0,  0 | Act_Identity |          | 
373 |          | Input_Str_X9B2_COL     |  0.00000 |  0.03574 | i  | 373,  0,  0 | Act_Identity |          | 
374 |          | Input_Str_X9BUFF_0     |  0.00000 | -0.20123 | i  | 374,  0,  0 | Act_Identity |          | 
375 |          | Input_Str_X9BUFF_0.5   |  0.00000 | -0.13143 | i  | 375,  0,  0 | Act_Identity |          | 
376 |          | Input_Str_X9BUFF_1     |  0.00000 | -0.20888 | i  | 376,  0,  0 | Act_Identity |          | 
377 |          | Input_Str_X9BUFF_2     |  0.00000 |  0.14822 | i  | 377,  0,  0 | Act_Identity |          | 
378 |          | Input_Str_X9BUFF_4     |  0.00000 | -0.20079 | i  | 378,  0,  0 | Act_Identity |          | 
379 |          | Input_Str_X9C1         |  0.00000 |  0.23068 | i  | 379,  0,  0 | Act_Identity |          | 
380 |          | Input_Str_X9C2         |  0.00000 |  0.23493 | i  | 380,  0,  0 | Act_Identity |          | 
381 |          | Input_Str_X9D2         |  0.00000 | -0.07770 | i  | 381,  0,  0 | Act_Identity |          | 
382 |          | Input_Str_X9E          |  0.00000 | -0.11083 | i  | 382,  0,  0 | Act_Identity |          | 
383 |          | Input_Str_X9G_BSES     |  0.00000 |  0.26545 | i  | 383,  0,  0 | Act_Identity |          | 
384 |          | Input_Str_X9G1         |  0.00000 | -0.11128 | i  | 384,  0,  0 | Act_Identity |          | 
385 |          | Input_Str_X9G2         |  0.00000 |  0.09934 | i  | 385,  0,  0 | Act_Identity |          | 
386 |          | Input_Str_X9H_NR       |  0.00000 |  0.08191 | i  | 386,  0,  0 | Act_Identity |          | 
387 |          | Input_Str_X9H1         |  0.00000 | -0.27549 | i  | 387,  0,  0 | Act_Identity |          | 
388 |          | Input_Str_X9I1         |  0.00000 | -0.14915 | i  | 388,  0,  0 | Act_Identity |          | 
389 |          | Input_Str_X9I2b        |  0.00000 |  0.28272 | i  | 389,  0,  0 | Act_Identity |          | 
390 |          | Input_Str_X9I2B        |  0.00000 |  0.11530 | i  | 390,  0,  0 | Act_Identity |          | 
391 |          | Input_Str_X9J2         |  0.00000 |  0.25988 | i  | 391,  0,  0 | Act_Identity |          | 
392 |          | Input_Str_X9R1         |  0.00000 |  0.01372 | i  | 392,  0,  0 | Act_Identity |          | 
393 |          | Input_Str_M1a          |  0.00000 |  0.10949 | i  | 393,  0,  0 | Act_Identity |          | 
394 |          | Input_Str_MIN_EC       |  0.00000 | -0.03701 | i  | 394,  0,  0 | Act_Identity |          | 
395 |          | Input_Str_MIN_NR_K2O   |  0.00000 |  0.27564 | i  | 395,  0,  0 | Act_Identity |          | 
396 |          | Input_Str_P10_1m2m     |  0.00000 |  0.20976 | i  | 396,  0,  0 | Act_Identity |          | 
397 |          | Input_Str_P10_20_100   |  0.00000 |  0.09867 | i  | 397,  0,  0 | Act_Identity |          | 
398 |          | Input_Str_P10_20_75    |  0.00000 |  0.26268 | i  | 398,  0,  0 | Act_Identity |          | 
399 |          | Input_Str_P10_20_75a   |  0.00000 | -0.22437 | i  | 399,  0,  0 | Act_Identity |          | 
400 |          | Input_Str_P10_75_106   |  0.00000 |  0.15216 | i  | 400,  0,  0 | Act_Identity |          | 
401 |          | Input_Str_P10_C_MCLW   |  0.00000 |  0.03396 | i  | 401,  0,  0 | Act_Identity |          | 
402 |          | Input_Str_P10_CF_C     |  0.00000 |  0.13994 | i  | 402,  0,  0 | Act_Identity |          | 
403 |          | Input_Str_P10_CF_CS    |  0.00000 |  0.25416 | i  | 403,  0,  0 | Act_Identity |          | 
404 |          | Input_Str_P10_CF_FS    |  0.00000 | -0.00688 | i  | 404,  0,  0 | Act_Identity |          | 
405 |          | Input_Str_P10_CF_S     |  0.00000 | -0.23783 | i  | 405,  0,  0 | Act_Identity |          | 
406 |          | Input_Str_P10_CF_Z     |  0.00000 | -0.11808 | i  | 406,  0,  0 | Act_Identity |          | 
407 |          | Input_Str_P10_GRAV     |  0.00000 | -0.22799 | i  | 407,  0,  0 | Act_Identity |          | 
408 |          | Input_Str_P10_gt2m     |  0.00000 |  0.29885 | i  | 408,  0,  0 | Act_Identity |          | 
409 |          | Input_Str_P10_gt2MI    |  0.00000 |  0.23990 | i  | 409,  0,  0 | Act_Identity |          | 
410 |          | Input_Str_P10_gt2OM    |  0.00000 | -0.27747 | i  | 410,  0,  0 | Act_Identity |          | 
411 |          | Input_Str_P10_HYD_C    |  0.00000 | -0.03365 | i  | 411,  0,  0 | Act_Identity |          | 
412 |          | Input_Str_P10_HYD_CS   |  0.00000 | -0.17573 | i  | 412,  0,  0 | Act_Identity |          | 
413 |          | Input_Str_P10_HYD_FS   |  0.00000 |  0.02001 | i  | 413,  0,  0 | Act_Identity |          | 
414 |          | Input_Str_P10_HYD_S    |  0.00000 | -0.09668 | i  | 414,  0,  0 | Act_Identity |          | 
415 |          | Input_Str_P10_HYD_Z    |  0.00000 | -0.00714 | i  | 415,  0,  0 | Act_Identity |          | 
416 |          | Input_Str_P10_I_C      |  0.00000 |  0.29441 | i  | 416,  0,  0 | Act_Identity |          | 
417 |          | Input_Str_P10_I_CS     |  0.00000 | -0.19101 | i  | 417,  0,  0 | Act_Identity |          | 
418 |          | Input_Str_P10_I_FS     |  0.00000 | -0.17668 | i  | 418,  0,  0 | Act_Identity |          | 
419 |          | Input_Str_P10_I_S      |  0.00000 | -0.16519 | i  | 419,  0,  0 | Act_Identity |          | 
420 |          | Input_Str_P10_I_Z      |  0.00000 |  0.17005 | i  | 420,  0,  0 | Act_Identity |          | 
421 |          | Input_Str_P10_NR_C     |  0.00000 | -0.14017 | i  | 421,  0,  0 | Act_Identity |          | 
422 |          | Input_Str_P10_NR_CS    |  0.00000 | -0.29636 | i  | 422,  0,  0 | Act_Identity |          | 
423 |          | Input_Str_P10_NR_FS    |  0.00000 |  0.11383 | i  | 423,  0,  0 | Act_Identity |          | 
424 |          | Input_Str_P10_NR_S     |  0.00000 | -0.21335 | i  | 424,  0,  0 | Act_Identity |          | 
425 |          | Input_Str_P10_NR_Saa   |  0.00000 |  0.16785 | i  | 425,  0,  0 | Act_Identity |          | 
426 |          | Input_Str_P10_NR_Z     |  0.00000 | -0.27570 | i  | 426,  0,  0 | Act_Identity |          | 
427 |          | Input_Str_P10_NR_ZC    |  0.00000 | -0.26228 | i  | 427,  0,  0 | Act_Identity |          | 
428 |          | Input_Str_P10_PB_C     |  0.00000 | -0.17169 | i  | 428,  0,  0 | Act_Identity |          | 
429 |          | Input_Str_P10_PB_CS    |  0.00000 | -0.27478 | i  | 429,  0,  0 | Act_Identity |          | 
430 |          | Input_Str_P10_PB_FS    |  0.00000 |  0.02394 | i  | 430,  0,  0 | Act_Identity |          | 
431 |          | Input_Str_P10_PB_S     |  0.00000 | -0.14194 | i  | 431,  0,  0 | Act_Identity |          | 
432 |          | Input_Str_P10_PB_Z     |  0.00000 | -0.27156 | i  | 432,  0,  0 | Act_Identity |          | 
433 |          | Input_Str_P10_PB1_C    |  0.00000 | -0.02061 | i  | 433,  0,  0 | Act_Identity |          | 
434 |          | Input_Str_P10_PB1_CS   |  0.00000 |  0.04408 | i  | 434,  0,  0 | Act_Identity |          | 
435 |          | Input_Str_P10_PB1_FS   |  0.00000 |  0.26331 | i  | 435,  0,  0 | Act_Identity |          | 
436 |          | Input_Str_P10_PB1_Z    |  0.00000 | -0.19597 | i  | 436,  0,  0 | Act_Identity |          | 
437 |          | Input_Str_P10_S_0.20   |  0.00000 |  0.21131 | i  | 437,  0,  0 | Act_Identity |          | 
438 |          | Input_Str_P10_S_0.48   |  0.00000 | -0.21730 | i  | 438,  0,  0 | Act_Identity |          | 
439 |          | Input_Str_P10_S_1      |  0.00000 | -0.01211 | i  | 439,  0,  0 | Act_Identity |          | 
440 |          | Input_Str_P10_S_1000   |  0.00000 | -0.27435 | i  | 440,  0,  0 | Act_Identity |          | 
441 |          | Input_Str_P10_S_125    |  0.00000 | -0.05646 | i  | 441,  0,  0 | Act_Identity |          | 
442 |          | Input_Str_P10_S_15.6   |  0.00000 |  0.05901 | i  | 442,  0,  0 | Act_Identity |          | 
443 |          | Input_Str_P10_S_2      |  0.00000 | -0.14140 | i  | 443,  0,  0 | Act_Identity |          | 
444 |          | Input_Str_P10_S_20     |  0.00000 |  0.23696 | i  | 444,  0,  0 | Act_Identity |          | 
445 |          | Input_Str_P10_S_2000   |  0.00000 | -0.07020 | i  | 445,  0,  0 | Act_Identity |          | 
446 |          | Input_Str_P10_S_250    |  0.00000 | -0.16875 | i  | 446,  0,  0 | Act_Identity |          | 
447 |          | Input_Str_P10_S_3.9    |  0.00000 |  0.07389 | i  | 447,  0,  0 | Act_Identity |          | 
448 |          | Input_Str_P10_S_31.2   |  0.00000 | -0.14154 | i  | 448,  0,  0 | Act_Identity |          | 
449 |          | Input_Str_P10_S_500    |  0.00000 | -0.00841 | i  | 449,  0,  0 | Act_Identity |          | 
450 |          | Input_Str_P10_S_53     |  0.00000 | -0.27429 | i  | 450,  0,  0 | Act_Identity |          | 
451 |          | Input_Str_P10_S_63     |  0.00000 | -0.10398 | i  | 451,  0,  0 | Act_Identity |          | 
452 |          | Input_Str_P10_S_7.8    |  0.00000 | -0.06352 | i  | 452,  0,  0 | Act_Identity |          | 
453 |          | Input_Str_P10_S_MCLW   |  0.00000 |  0.20917 | i  | 453,  0,  0 | Act_Identity |          | 
454 |          | Input_Str_P10_Z_MCLW   |  0.00000 | -0.24524 | i  | 454,  0,  0 | Act_Identity |          | 
455 |          | Input_Str_P10100_200   |  0.00000 | -0.09693 | i  | 455,  0,  0 | Act_Identity |          | 
456 |          | Input_Str_P10106_150   |  0.00000 |  0.18911 | i  | 456,  0,  0 | Act_Identity |          | 
457 |          | Input_Str_P10150_180   |  0.00000 | -0.12823 | i  | 457,  0,  0 | Act_Identity |          | 
458 |          | Input_Str_P10180_300   |  0.00000 |  0.11000 | i  | 458,  0,  0 | Act_Identity |          | 
459 |          | Input_Str_P10200_500   |  0.00000 |  0.25947 | i  | 459,  0,  0 | Act_Identity |          | 
460 |          | Input_Str_P10200_600   |  0.00000 | -0.18610 | i  | 460,  0,  0 | Act_Identity |          | 
461 |          | Input_Str_P102002000   |  0.00000 |  0.04132 | i  | 461,  0,  0 | Act_Identity |          | 
462 |          | Input_Str_P10300_600   |  0.00000 | -0.14712 | i  | 462,  0,  0 | Act_Identity |          | 
463 |          | Input_Str_P105002000   |  0.00000 | -0.05468 | i  | 463,  0,  0 | Act_Identity |          | 
464 |          | Input_Str_P106001000   |  0.00000 |  0.19792 | i  | 464,  0,  0 | Act_Identity |          | 
465 |          | Input_Str_P106002000   |  0.00000 |  0.17128 | i  | 465,  0,  0 | Act_Identity |          | 
466 |          | Input_Str_P10A1_C      |  0.00000 | -0.19758 | i  | 466,  0,  0 | Act_Identity |          | 
467 |          | Input_Str_P10A1_CS     |  0.00000 |  0.21477 | i  | 467,  0,  0 | Act_Identity |          | 
468 |          | Input_Str_P10A1_FS     |  0.00000 |  0.01604 | i  | 468,  0,  0 | Act_Identity |          | 
469 |          | Input_Str_P10A1_Z      |  0.00000 | -0.24927 | i  | 469,  0,  0 | Act_Identity |          | 
470 |          | Input_Str_P3A_CLW      |  0.00000 | -0.12071 | i  | 470,  0,  0 | Act_Identity |          | 
471 |          | Input_Str_P3A_NR       |  0.00000 |  0.02230 | i  | 471,  0,  0 | Act_Identity |          | 
472 |          | Input_Str_P3A1         |  0.00000 |  0.00281 | i  | 472,  0,  0 | Act_Identity |          | 
473 |          | Input_Str_P3A1_C4      |  0.00000 |  0.02532 | i  | 473,  0,  0 | Act_Identity |          | 
474 |          | Input_Str_P3A1_CLOD    |  0.00000 | -0.08893 | i  | 474,  0,  0 | Act_Identity |          | 
475 |          | Input_Str_P3A1_e       |  0.00000 |  0.26154 | i  | 475,  0,  0 | Act_Identity |          | 
476 |          | Input_Str_P3A2_McK     |  0.00000 |  0.07468 | i  | 476,  0,  0 | Act_Identity |          | 
477 |          | Input_Str_P3A2_McKMP   |  0.00000 | -0.26197 | i  | 477,  0,  0 | Act_Identity |          | 
478 |          | Input_Str_P3B_GV_01    |  0.00000 |  0.03701 | i  | 478,  0,  0 | Act_Identity |          | 
479 |          | Input_Str_P3B_GV_03    |  0.00000 | -0.11953 | i  | 479,  0,  0 | Act_Identity |          | 
480 |          | Input_Str_P3B_GV_15    |  0.00000 |  0.06822 | i  | 480,  0,  0 | Act_Identity |          | 
481 |          | Input_Str_P3B_NR_005   |  0.00000 |  0.04719 | i  | 481,  0,  0 | Act_Identity |          | 
482 |          | Input_Str_P3B_NR_01    |  0.00000 |  0.19408 | i  | 482,  0,  0 | Act_Identity |          | 
483 |          | Input_Str_P3B_NR_15    |  0.00000 | -0.13506 | i  | 483,  0,  0 | Act_Identity |          | 
484 |          | Input_Str_P3B_VL_01    |  0.00000 |  0.20188 | i  | 484,  0,  0 | Act_Identity |          | 
485 |          | Input_Str_P3B_VL_15    |  0.00000 |  0.08572 | i  | 485,  0,  0 | Act_Identity |          | 
486 |          | Input_Str_P3B1GV_15    |  0.00000 | -0.13654 | i  | 486,  0,  0 | Act_Identity |          | 
487 |          | Input_Str_P3B1VL_1     |  0.00000 | -0.29106 | i  | 487,  0,  0 | Act_Identity |          | 
488 |          | Input_Str_P3B1VL_15    |  0.00000 | -0.24934 | i  | 488,  0,  0 | Act_Identity |          | 
489 |          | Input_Str_P3B2GV_1     |  0.00000 |  0.28188 | i  | 489,  0,  0 | Act_Identity |          | 
490 |          | Input_Str_P3B2GV_15    |  0.00000 | -0.06711 | i  | 490,  0,  0 | Act_Identity |          | 
491 |          | Input_Str_P3B2GV_5     |  0.00000 | -0.05753 | i  | 491,  0,  0 | Act_Identity |          | 
492 |          | Input_Str_P3B2VL_03    |  0.00000 | -0.08402 | i  | 492,  0,  0 | Act_Identity |          | 
493 |          | Input_Str_P3B2VL_1     |  0.00000 | -0.08936 | i  | 493,  0,  0 | Act_Identity |          | 
494 |          | Input_Str_P3B2VL_15    |  0.00000 |  0.15319 | i  | 494,  0,  0 | Act_Identity |          | 
495 |          | Input_Str_P3B2VL_5     |  0.00000 |  0.21923 | i  | 495,  0,  0 | Act_Identity |          | 
496 |          | Input_Str_P3B3VLa001   |  0.00000 |  0.27858 | i  | 496,  0,  0 | Act_Identity |          | 
497 |          | Input_Str_P3B3VLa005   |  0.00000 | -0.05305 | i  | 497,  0,  0 | Act_Identity |          | 
498 |          | Input_Str_P3B3VLa01    |  0.00000 | -0.27604 | i  | 498,  0,  0 | Act_Identity |          | 
499 |          | Input_Str_P3B3VLa03    |  0.00000 | -0.15086 | i  | 499,  0,  0 | Act_Identity |          | 
500 |          | Input_Str_P3B3VLa06    |  0.00000 | -0.01597 | i  | 500,  0,  0 | Act_Identity |          | 
501 |          | Input_Str_P3B3VLaSAT   |  0.00000 | -0.02706 | i  | 501,  0,  0 | Act_Identity |          | 
502 |          | Input_Str_P3B3VLb001   |  0.00000 |  0.02713 | i  | 502,  0,  0 | Act_Identity |          | 
503 |          | Input_Str_P3B3VLb003   |  0.00000 | -0.16229 | i  | 503,  0,  0 | Act_Identity |          | 
504 |          | Input_Str_P3B3VLb005   |  0.00000 |  0.13532 | i  | 504,  0,  0 | Act_Identity |          | 
505 |          | Input_Str_P3B3VLb01    |  0.00000 | -0.28005 | i  | 505,  0,  0 | Act_Identity |          | 
506 |          | Input_Str_P3B3VLb03    |  0.00000 |  0.19038 | i  | 506,  0,  0 | Act_Identity |          | 
507 |          | Input_Str_P3B3VLb05    |  0.00000 | -0.13637 | i  | 507,  0,  0 | Act_Identity |          | 
508 |          | Input_Str_P3B3VLb06    |  0.00000 | -0.25314 | i  | 508,  0,  0 | Act_Identity |          | 
509 |          | Input_Str_P3B3VLbSAT   |  0.00000 |  0.15193 | i  | 509,  0,  0 | Act_Identity |          | 
510 |          | Input_Str_P3B3VLc001   |  0.00000 |  0.04044 | i  | 510,  0,  0 | Act_Identity |          | 
511 |          | Input_Str_P3B3VLc003   |  0.00000 | -0.18439 | i  | 511,  0,  0 | Act_Identity |          | 
512 |          | Input_Str_P3B3VLc005   |  0.00000 |  0.15625 | i  | 512,  0,  0 | Act_Identity |          | 
513 |          | Input_Str_P3B3VLc01    |  0.00000 | -0.13435 | i  | 513,  0,  0 | Act_Identity |          | 
514 |          | Input_Str_P3B3VLc03    |  0.00000 | -0.28751 | i  | 514,  0,  0 | Act_Identity |          | 
515 |          | Input_Str_P3B3VLc06    |  0.00000 | -0.00060 | i  | 515,  0,  0 | Act_Identity |          | 
516 |          | Input_Str_P3B3VLcSAT   |  0.00000 |  0.19131 | i  | 516,  0,  0 | Act_Identity |          | 
517 |          | Input_Str_P3B3VLd06    |  0.00000 |  0.22852 | i  | 517,  0,  0 | Act_Identity |          | 
518 |          | Input_Str_P3B3VLd1     |  0.00000 | -0.12356 | i  | 518,  0,  0 | Act_Identity |          | 
519 |          | Input_Str_P3B3VLd15    |  0.00000 |  0.29312 | i  | 519,  0,  0 | Act_Identity |          | 
520 |          | Input_Str_P3B3VLd3     |  0.00000 |  0.24040 | i  | 520,  0,  0 | Act_Identity |          | 
521 |          | Input_Str_P3B3VLd5     |  0.00000 |  0.09911 | i  | 521,  0,  0 | Act_Identity |          | 
522 |          | Input_Str_P3B3VLe004   |  0.00000 | -0.16283 | i  | 522,  0,  0 | Act_Identity |          | 
523 |          | Input_Str_P3B3VLe01    |  0.00000 |  0.01344 | i  | 523,  0,  0 | Act_Identity |          | 
524 |          | Input_Str_P3B3VLe03    |  0.00000 | -0.14822 | i  | 524,  0,  0 | Act_Identity |          | 
525 |          | Input_Str_P3B3VLe06    |  0.00000 | -0.02906 | i  | 525,  0,  0 | Act_Identity |          | 
526 |          | Input_Str_P3B3VLe15    |  0.00000 | -0.21307 | i  | 526,  0,  0 | Act_Identity |          | 
527 |          | Input_Str_P3B3VLe2     |  0.00000 | -0.24540 | i  | 527,  0,  0 | Act_Identity |          | 
528 |          | Input_Str_P3B3VLe7     |  0.00000 | -0.09232 | i  | 528,  0,  0 | Act_Identity |          | 
529 |          | Input_Str_P3B4GV_01    |  0.00000 | -0.14617 | i  | 529,  0,  0 | Act_Identity |          | 
530 |          | Input_Str_P3B4VL_005   |  0.00000 |  0.17670 | i  | 530,  0,  0 | Act_Identity |          | 
531 |          | Input_Str_P3B5GV_01    |  0.00000 |  0.15555 | i  | 531,  0,  0 | Act_Identity |          | 
532 |          | Input_Str_P3B6VL_DUL   |  0.00000 |  0.04197 | i  | 532,  0,  0 | Act_Identity |          | 
533 |          | Input_Str_P3B6VL_LL    |  0.00000 |  0.20810 | i  | 533,  0,  0 | Act_Identity |          | 
534 |          | Input_Str_P3B6VL_SAT   |  0.00000 |  0.29816 | i  | 534,  0,  0 | Act_Identity |          | 
535 |          | Input_Str_P4_100DMcK   |  0.00000 | -0.21589 | i  | 535,  0,  0 | Act_Identity |          | 
536 |          | Input_Str_P4_10DMcK    |  0.00000 | -0.13006 | i  | 536,  0,  0 | Act_Identity |          | 
537 |          | Input_Str_P4_30_LOV    |  0.00000 |  0.13913 | i  | 537,  0,  0 | Act_Identity |          | 
538 |          | Input_Str_P4_30DMcK    |  0.00000 | -0.23203 | i  | 538,  0,  0 | Act_Identity |          | 
539 |          | Input_Str_P4_50_McK    |  0.00000 |  0.13416 | i  | 539,  0,  0 | Act_Identity |          | 
540 |          | Input_Str_P4_50DMcK    |  0.00000 |  0.16062 | i  | 540,  0,  0 | Act_Identity |          | 
541 |          | Input_Str_P4_sat       |  0.00000 | -0.07426 | i  | 541,  0,  0 | Act_Identity |          | 
542 |          | Input_Str_P4_sat_FH    |  0.00000 |  0.11622 | i  | 542,  0,  0 | Act_Identity |          | 
543 |          | Input_Str_P4_sat_For   |  0.00000 | -0.05937 | i  | 543,  0,  0 | Act_Identity |          | 
544 |          | Input_Str_P4_sat_LOV   |  0.00000 | -0.05748 | i  | 544,  0,  0 | Act_Identity |          | 
545 |          | Input_Str_P4_sat_McK   |  0.00000 |  0.27756 | i  | 545,  0,  0 | Act_Identity |          | 
546 |          | Input_Str_P5_COLE      |  0.00000 | -0.23580 | i  | 546,  0,  0 | Act_Identity |          | 
547 |          | Input_Str_P5_LS_MOD    |  0.00000 | -0.23613 | i  | 547,  0,  0 | Act_Identity |          | 
548 |          | Input_Str_P6_LP        |  0.00000 | -0.24521 | i  | 548,  0,  0 | Act_Identity |          | 
549 |          | Input_Str_PWS1.2mm     |  0.00000 |  0.00752 | i  | 549,  0,  0 | Act_Identity |          | 
550 |          | Input_Str_PWS20.63     |  0.00000 | -0.09446 | i  | 550,  0,  0 | Act_Identity |          | 
551 |          | Input_Str_PWS212.425   |  0.00000 | -0.20799 | i  | 551,  0,  0 | Act_Identity |          | 
552 |          | Input_Str_PWS425.1mm   |  0.00000 |  0.23614 | i  | 552,  0,  0 | Act_Identity |          | 
553 |          | Input_Str_PWS63.212    |  0.00000 |  0.27107 | i  | 553,  0,  0 | Act_Identity |          | 
554 |          | Input_Str_TE_MIR_AL2O3 |  0.00000 |  0.25217 | i  | 554,  0,  0 | Act_Identity |          | 
555 |          | Input_Str_TE_MIR_FE2O3 |  0.00000 |  0.12622 | i  | 555,  0,  0 | Act_Identity |          | 
556 |          | Input_Str_TE_MIR_SI02  |  0.00000 |  0.15521 | i  | 556,  0,  0 | Act_Identity |          | 
557 |          | Input_Str_TE_NR_AL     |  0.00000 | -0.01500 | i  | 557,  0,  0 | Act_Identity |          | 
558 |          | Input_Str_TE_NR_AL2O   |  0.00000 |  0.28961 | i  | 558,  0,  0 | Act_Identity |          | 
559 |          | Input_Str_TE_NR_CA     |  0.00000 |  0.03089 | i  | 559,  0,  0 | Act_Identity |          | 
560 |          | Input_Str_TE_NR_FE20   |  0.00000 |  0.05745 | i  | 560,  0,  0 | Act_Identity |          | 
561 |          | Input_Str_TE_NR_MG     |  0.00000 | -0.05308 | i  | 561,  0,  0 | Act_Identity |          | 
562 |          | Input_Str_TE_NR_NA     |  0.00000 | -0.27849 | i  | 562,  0,  0 | Act_Identity |          | 
563 |          | Input_Str_TE_NR_SI02   |  0.00000 |  0.22451 | i  | 563,  0,  0 | Act_Identity |          | 
564 |          | Input_Str_TE_NR_TI02   |  0.00000 | -0.25438 | i  | 564,  0,  0 | Act_Identity |          | 
565 |          | Input_Str_TE_XRF_MG    |  0.00000 |  0.03792 | i  | 565,  0,  0 | Act_Identity |          | 
566 |          | Input_Str_TE_XRFAL     |  0.00000 | -0.00485 | i  | 566,  0,  0 | Act_Identity |          | 
567 |          | Input_Str_TE_XRFCA     |  0.00000 |  0.17034 | i  | 567,  0,  0 | Act_Identity |          | 
568 |          | Input_Str_TE_XRFNA     |  0.00000 | -0.29528 | i  | 568,  0,  0 | Act_Identity |          | 
569 |          | Input_Str_TE_XRFSI02   |  0.00000 |  0.21742 | i  | 569,  0,  0 | Act_Identity |          | 
570 |          | Input_Str_TE_XRFTIO2   |  0.00000 | -0.28056 | i  | 570,  0,  0 | Act_Identity |          | 
571 |          | Input_Str_XRD_C_Amp    |  0.00000 | -0.15158 | i  | 571,  0,  0 | Act_Identity |          | 
572 |          | Input_Str_XRD_C_An     |  0.00000 | -0.23543 | i  | 572,  0,  0 | Act_Identity |          | 
573 |          | Input_Str_XRD_C_Bhm    |  0.00000 |  0.15474 | i  | 573,  0,  0 | Act_Identity |          | 
574 |          | Input_Str_XRD_C_Bt     |  0.00000 | -0.10811 | i  | 574,  0,  0 | Act_Identity |          | 
575 |          | Input_Str_XRD_C_Cal    |  0.00000 |  0.15284 | i  | 575,  0,  0 | Act_Identity |          | 
576 |          | Input_Str_XRD_C_Ch2    |  0.00000 | -0.06748 | i  | 576,  0,  0 | Act_Identity |          | 
577 |          | Input_Str_XRD_C_Chl    |  0.00000 | -0.24407 | i  | 577,  0,  0 | Act_Identity |          | 
578 |          | Input_Str_XRD_C_Fsp    |  0.00000 |  0.00711 | i  | 578,  0,  0 | Act_Identity |          | 
579 |          | Input_Str_XRD_C_Gbs    |  0.00000 |  0.17552 | i  | 579,  0,  0 | Act_Identity |          | 
580 |          | Input_Str_XRD_C_Gth    |  0.00000 | -0.10813 | i  | 580,  0,  0 | Act_Identity |          | 
581 |          | Input_Str_XRD_C_Hem    |  0.00000 |  0.15674 | i  | 581,  0,  0 | Act_Identity |          | 
582 |          | Input_Str_XRD_C_Ht0    |  0.00000 |  0.23457 | i  | 582,  0,  0 | Act_Identity |          | 
583 |          | Input_Str_XRD_C_Ilt    |  0.00000 |  0.29490 | i  | 583,  0,  0 | Act_Identity |          | 
584 |          | Input_Str_XRD_C_Is     |  0.00000 |  0.13797 | i  | 584,  0,  0 | Act_Identity |          | 
585 |          | Input_Str_XRD_C_K2O    |  0.00000 | -0.01415 | i  | 585,  0,  0 | Act_Identity |          | 
586 |          | Input_Str_XRD_C_Ka     |  0.00000 |  0.06244 | i  | 586,  0,  0 | Act_Identity |          | 
587 |          | Input_Str_XRD_C_Kln    |  0.00000 |  0.29406 | i  | 587,  0,  0 | Act_Identity |          | 
588 |          | Input_Str_XRD_C_Lp     |  0.00000 | -0.00403 | i  | 588,  0,  0 | Act_Identity |          | 
589 |          | Input_Str_XRD_C_Mag    |  0.00000 |  0.00046 | i  | 589,  0,  0 | Act_Identity |          | 
590 |          | Input_Str_XRD_C_Mca    |  0.00000 |  0.27909 | i  | 590,  0,  0 | Act_Identity |          | 
591 |          | Input_Str_XRD_C_Mgh    |  0.00000 |  0.16336 | i  | 591,  0,  0 | Act_Identity |          | 
592 |          | Input_Str_XRD_C_Mnt    |  0.00000 |  0.05720 | i  | 592,  0,  0 | Act_Identity |          | 
593 |          | Input_Str_XRD_C_Ms     |  0.00000 | -0.28110 | i  | 593,  0,  0 | Act_Identity |          | 
594 |          | Input_Str_XRD_C_Plg    |  0.00000 |  0.15301 | i  | 594,  0,  0 | Act_Identity |          | 
595 |          | Input_Str_XRD_C_Plm    |  0.00000 |  0.13329 | i  | 595,  0,  0 | Act_Identity |          | 
596 |          | Input_Str_XRD_C_Qz     |  0.00000 |  0.21286 | i  | 596,  0,  0 | Act_Identity |          | 
597 |          | Input_Str_XRD_C_Rt     |  0.00000 | -0.15306 | i  | 597,  0,  0 | Act_Identity |          | 
598 |          | Input_Str_XRD_C_Sme    |  0.00000 |  0.03596 | i  | 598,  0,  0 | Act_Identity |          | 
599 |          | Input_Str_XRD_C_Tc     |  0.00000 | -0.28286 | i  | 599,  0,  0 | Act_Identity |          | 
600 |          | Input_Str_XRD_C_Vrm    |  0.00000 |  0.01300 | i  | 600,  0,  0 | Act_Identity |          | 
601 |          | Hidden_2_1             |  0.20765 | -1.68655 | h  |   1,  2,  0 |||
602 |          | Hidden_2_2             |  0.20939 | -1.27390 | h  |   2,  2,  0 |||
603 |          | Hidden_2_3             |  0.17573 | -1.64241 | h  |   3,  2,  0 |||
604 |          | Hidden_2_4             |  0.29427 | -0.84957 | h  |   4,  2,  0 |||
605 |          | Hidden_2_5             |  0.17668 | -1.47240 | h  |   5,  2,  0 |||
606 |          | Output_1               |  0.56644 | -0.63037 | o  |   1,  4,  0 |||
----|----------|------------------------|----------|----------|----|-------------|--------------|----------|-------


connection definition section :

target | site | source:weight
-------|------|----------------------------------------------------------------------------------------------------------------
   601 |      | 600: 0.01113, 599:-0.16694, 598: 0.07455, 597:-0.25390, 596: 0.26497, 595: 0.05459, 594: 0.14804, 593:-0.18525,
                592: 0.09752, 591: 0.00123, 590:-0.31415, 589: 0.28966, 588:-0.33958, 587:-0.29329, 586:-0.08468, 585: 0.20577,
                584: 0.33467, 583:-0.21245, 582: 0.19591, 581: 0.02470, 580:-0.31672, 579:-0.18204, 578:-0.24825, 577:-0.12191,
                576:-0.27648, 575:-0.19230, 574:-0.16589, 573: 0.07941, 572:-0.18542, 571: 0.06498, 570:-0.06939, 569: 0.31812,
                568:-0.26382, 567:-0.13202, 566: 0.21160, 565: 0.00275, 564:-0.35661, 563:-0.08577, 562:-0.13918, 561:-0.20547,
                560:-0.06378, 559:-0.02889, 558:-0.06169, 557:-0.21646, 556: 0.04009, 555:-0.28623, 554: 0.05998, 553: 0.21053,
                552: 0.32320, 551:-0.16423, 550: 0.14411, 549:-0.23186, 548:-0.07519, 547:-0.27107, 546:-0.17791, 545:-0.11340,
                544:-0.25414, 543: 0.05449, 542:-0.24530, 541:-0.16883, 540: 0.13200, 539:-0.28951, 538: 0.22911, 537:-0.03186,
                536:-0.14073, 535:-0.04396, 534:-0.23093, 533: 0.23807, 532:-0.17091, 531: 0.30489, 530:-0.23378, 529:-0.10415,
                528: 0.27740, 527:-0.09682, 526: 0.02363, 525: 0.02757, 524: 0.02695, 523:-0.03320, 522:-0.19887, 521: 0.12526,
                520: 0.06008, 519: 0.00669, 518: 0.30625, 517: 0.40918, 516: 0.67020, 515: 0.01959, 514:-0.06351, 513: 0.47840,
                512: 0.35672, 511: 0.28992, 510: 0.03991, 509: 0.01323, 508:-0.26203, 507: 0.13275, 506:-0.07764, 505: 0.09270,
                504:-0.12162, 503: 0.30517, 502:-0.31641, 501:-0.02961, 500:-0.04719, 499: 0.09951, 498:-0.11299, 497:-0.18158,
                496:-0.23042, 495:-0.24126, 494: 0.02634, 493: 0.01464, 492:-0.26601, 491:-0.03017, 490: 0.00729, 489:-0.09158,
                488: 0.04997, 487:-0.21079, 486: 0.19490, 485:-0.39175, 484: 0.29823, 483: 0.02046, 482: 0.08220, 481: 0.18409,
                480: 0.12493, 479: 0.29082, 478: 0.08730, 477:-0.11736, 476:-0.03013, 475: 0.09145, 474: 0.00616, 473:-0.27849,
                472:-0.01710, 471:-0.01302, 470:-0.21792, 469:-0.16587, 468:-0.04696, 467:-0.05292, 466:-0.09001, 465:-0.19854,
                464: 0.16328, 463: 0.30006, 462: 0.19521, 461: 0.11096, 460:-0.28098, 459:-0.25562, 458:-0.24756, 457:-0.30645,
                456:-0.02375, 455: 0.11205, 454:-0.14126, 453:-0.25609, 452:-0.28816, 451: 0.25429, 450:-0.00806, 449:-0.21617,
                448:-0.34358, 447: 0.00795, 446:-0.14975, 445:-0.32555, 444: 0.12676, 443:-0.28932, 442: 0.10651, 441:-0.33647,
                440:-0.31515, 439: 0.09519, 438:-0.01351, 437: 0.09380, 436: 0.07243, 435:-0.13174, 434: 0.31319, 433:-0.29730,
                432:-0.36471, 431:-0.17134, 430: 0.19588, 429: 0.23532, 428: 0.21139, 427:-0.17144, 426: 0.55630, 425:-0.30101,
                424: 0.50986, 423: 0.04334, 422: 0.12744, 421: 0.06589, 420: 0.28379, 419:-0.27906, 418:-0.26694, 417:-0.15497,
                416: 0.10806, 415: 0.38493, 414: 0.21213, 413: 0.04411, 412:-0.26718, 411: 0.31128, 410:-0.30556, 409: 0.13484,
                408:-0.06702, 407: 0.14726, 406:-0.16917, 405:-0.12622, 404: 0.03011, 403: 0.21598, 402: 0.35494, 401: 0.07078,
                400: 0.15666, 399: 0.26543, 398: 0.09867, 397:-0.10768, 396:-0.02516, 395: 0.34101, 394: 0.35473, 393:-0.16069,
                392:-0.15058, 391:-0.06212, 390:-0.17102, 389: 0.14733, 388:-0.10583, 387: 0.16119, 386:-0.08222, 385: 0.29187,
                384: 0.26291, 383: 0.02910, 382: 0.04344, 381:-0.14950, 380:-0.15623, 379: 0.19473, 378:-0.04747, 377:-0.31097,
                376: 0.01781, 375: 0.17775, 374:-0.02589, 373: 0.28450, 372:-0.11830, 371:-0.01591, 370: 0.02650, 369: 0.14994,
                368:-0.00476, 367: 0.24928, 366:-0.09552, 365: 0.06572, 364: 0.28794, 363:-0.23359, 362:-0.13036, 361:-0.10140,
                360:-0.22687, 359:-0.39161, 358:-0.10565, 357: 0.31394, 356: 0.47666, 355:-0.13950, 354:-0.09256, 353: 0.27148,
                352: 0.17557, 351:-0.16239, 350:-0.01794, 349:-0.19362, 348:-0.00304, 347:-0.04087, 346:-0.29580, 345: 0.24006,
                344:-0.26033, 343: 0.27678, 342:-0.13708, 341:-0.27263, 340:-0.08721, 339:-0.07237, 338: 0.08320, 337:-0.22018,
                336: 0.09305, 335:-0.11838, 334: 0.34445, 333:-0.06074, 332: 0.14409, 331:-0.26896, 330:-0.04743, 329:-0.01307,
                328: 0.07566, 327:-0.00106, 326:-0.02742, 325:-0.01213, 324: 0.07087, 323: 0.07053, 322:-0.17283, 321: 0.16064,
                320: 0.12896, 319: 0.06529, 318: 0.17590, 317:-0.25601, 316: 0.04134, 315: 0.31024, 314:-0.10117, 313:-0.01598,
                312: 0.03569, 311: 0.01197, 310:-0.05876, 309:-0.07977, 308:-0.16423, 307:-0.26340, 306:-0.12540, 305:-0.20332,
                304: 0.23249, 303: 0.30167, 302: 0.12640, 301: 0.29249, 300:-0.11582, 299: 0.14972, 298: 0.81155, 297:-0.09981,
                296:-0.48513, 295:-0.11096, 294:-0.26394, 293: 0.39010, 292: 0.40373, 291:-0.32169, 290: 0.35689, 289: 0.17185,
                288: 0.05340, 287:-0.21780, 286:-0.41250, 285: 0.17391, 284: 0.27308, 283:-0.28451, 282: 0.07929, 281: 0.36309,
                280: 0.13487, 279:-0.19133, 278:-0.36502, 277:-0.26741, 276:-0.28242, 275:-0.15476, 274:-0.11367, 273:-0.22978,
                272: 0.21169, 271: 0.14360, 270: 0.06899, 269:-0.27692, 268:-0.45357, 267: 0.16336, 266:-0.15999, 265: 0.13546,
                264:-0.20251, 263: 0.07781, 262: 0.01015, 261: 0.17408, 260:-0.21057, 259:-0.04958, 258: 0.13554, 257: 0.29955,
                256: 0.22157, 255:-0.46015, 254: 0.21500, 253:-0.17595, 252: 0.01829, 251:-0.01476, 250:-0.36313, 249:-0.17547,
                248: 0.03735, 247: 0.10021, 246: 0.19502, 245: 0.17801, 244:-0.40689, 243: 0.18875, 242: 0.30731, 241: 0.26761,
                240: 0.10291, 239: 0.31441, 238:-0.08119, 237: 0.09921, 236: 0.27916, 235: 0.19688, 234:-0.08132, 233:-0.45067,
                232: 0.07412, 231: 0.12417, 230:-0.05768, 229: 0.09310, 228: 0.03452, 227: 0.18117, 226:-0.03701, 225: 0.29582,
                224: 0.02994, 223:-0.15031, 222:-0.09217, 221:-0.02594, 220: 0.27788, 219: 0.03748, 218:-0.28055, 217: 0.11831,
                216: 0.21940, 215: 0.05584, 214: 0.26488, 213:-0.19582, 212:-0.24238, 211:-0.17136, 210:-0.00372, 209:-0.03944,
                208: 0.19277, 207: 0.03384, 206: 0.22831, 205:-0.10005, 204: 0.44503, 203: 0.18404, 202: 0.26228, 201: 0.14413,
                200: 0.14909, 199:-0.10608, 198:-0.26207, 197:-0.15073, 196: 0.07597, 195: 0.18733, 194: 0.19448, 193: 0.26657,
                192: 0.15994, 191: 0.00966, 190: 0.00681, 189:-0.13320, 188:-0.20661, 187: 0.22112, 186: 0.20743, 185: 0.14266,
                184:-0.24331, 183:-0.16311, 182:-0.27337, 181:-0.31600, 180:-0.14054, 179: 0.17235, 178: 0.21943, 177:-0.15916,
                176:-0.27448, 175:-0.19396, 174: 0.04982, 173:-0.27243, 172: 0.37251, 171: 0.22619, 170:-0.05652, 169: 0.13875,
                168:-0.25339, 167: 0.54727, 166: 0.09832, 165:-0.20437, 164: 0.00157, 163: 0.29849, 162:-0.27783, 161:-0.10310,
                160: 0.03494, 159:-0.15935, 158: 0.00155, 157:-0.04531, 156: 0.02202, 155:-0.08299, 154:-0.23947, 153:-0.18456,
                152:-0.18134, 151:-0.11439, 150: 0.08598, 149:-0.11611, 148: 0.22331, 147:-0.12434, 146: 0.20027, 145: 0.06042,
                144: 0.07246, 143: 0.17087, 142:-0.21337, 141:-0.40142, 140:-0.13057, 139: 0.04371, 138:-0.15187, 137:-0.02149,
                136: 0.22049, 135: 0.08476, 134:-0.10746, 133:-0.26348, 132: 0.23322, 131: 0.21326, 130:-0.18932, 129:-0.15887,
                128: 0.22749, 127: 0.12267, 126:-0.36850, 125: 0.03510, 124:-0.24361, 123:-0.22012, 122:-0.19351, 121: 0.22463,
                120: 0.20703, 119: 0.02224, 118:-0.19413, 117: 0.19967, 116:-0.06878, 115: 0.22483, 114:-0.05224, 113: 0.32850,
                112: 0.06703, 111:-0.27627, 110: 0.11825, 109:-0.14178, 108: 0.24122, 107:-0.17126, 106: 0.26395, 105: 0.27912,
                104:-0.04843, 103:-0.07870, 102: 0.26508, 101:-0.01126, 100:-0.04770,  99: 0.22439,  98:-0.17490,  97:-0.08671,
                 96:-0.10403,  95: 0.03153,  94:-0.00451,  93:-0.05876,  92:-0.29394,  91: 0.23510,  90: 0.23440,  89:-0.08625,
                 88: 0.15811,  87:-0.22368,  86:-0.10244,  85:-0.30636,  84:-0.13431,  83:-0.13760,  82:-0.09368,  81: 0.34592,
                 80:-0.29292,  79: 0.04898,  78: 0.12840,  77:-0.13752,  76: 0.12610,  75: 0.24040,  74:-0.15546,  73:-0.19034,
                 72:-0.31521,  71: 0.27846,  70: 0.19863,  69: 0.14278,  68: 0.10096,  67:-0.21720,  66:-0.14810,  65:-0.00127,
                 64:-0.03053,  63:-0.06221,  62:-0.07923,  61: 0.21030,  60:-0.21321,  59:-0.28415,  58: 0.06433,  57: 0.47696,
                 56:-0.13348,  55: 0.17210,  54: 0.00829,  53:-1.02567,  52: 0.08326,  51: 0.27321,  50:-0.23955,  49: 0.07966,
                 48: 0.21641,  47:-0.15495,  46: 0.22723,  45: 0.18496,  44:-0.19546,  43:-0.03484,  42:-0.16759,  41:-0.30612,
                 40: 0.06104,  39: 0.04561,  38:-0.39570,  37: 0.16229,  36:-0.28626,  35:-0.04968,  34: 0.22855,  33: 0.06197,
                 32:-0.20617,  31: 0.13613,  30:-0.26357,  29:-0.19388,  28: 0.03895,  27:-0.28661,  26: 0.36939,  25: 0.17902,
                 24: 0.18722,  23: 0.20979,  22: 0.17044,  21: 0.19501,  20:-0.21736,  19:-0.01624,  18: 0.01264,  17:-0.14155,
                 16:-0.11040,  15: 0.11379,  14:-0.04552,  13:-0.09180,  12: 0.13289,  11:-0.29046,  10: 0.29461,   9:-0.28241,
                  8:-0.02823,   7: 0.21000,   6: 0.27532,   5:-0.26378,   4:-0.02591,   3: 0.12220,   2:-0.16112,   1: 0.28947
   602 |      | 600:-0.40451, 599: 0.01050, 598: 0.25698, 597:-0.01971, 596:-0.17477, 595:-0.20126, 594:-0.45937, 593: 0.28167,
                592:-0.19447, 591: 0.06592, 590: 0.43203, 589:-0.21491, 588: 0.20401, 587: 0.05074, 586:-0.21856, 585:-0.04795,
                584: 0.02135, 583: 0.01232, 582: 0.12038, 581: 0.20660, 580: 0.47073, 579: 0.22212, 578:-0.19287, 577: 0.01895,
                576:-0.11074, 575:-0.14932, 574: 0.09196, 573:-0.06156, 572: 0.27276, 571: 0.23578, 570:-0.03939, 569:-0.09530,
                568: 0.24505, 567: 0.15330, 566: 0.13660, 565: 0.09723, 564:-0.02341, 563: 0.06643, 562: 0.08273, 561:-0.28304,
                560: 0.14881, 559:-0.02428, 558: 0.19513, 557: 0.02046, 556: 0.22013, 555:-0.15858, 554: 0.26735, 553: 0.44088,
                552:-0.02608, 551: 0.47322, 550:-0.34776, 549: 0.15972, 548:-0.01831, 547: 0.01934, 546: 0.24862, 545: 0.37029,
                544: 0.23569, 543:-0.13513, 542: 0.02128, 541: 0.19940, 540:-0.02402, 539:-0.14841, 538:-0.25813, 537: 0.59858,
                536: 0.19027, 535: 0.06572, 534:-0.10372, 533:-0.00388, 532: 0.18997, 531: 0.18033, 530: 0.08122, 529:-0.24793,
                528: 0.17724, 527:-0.27792, 526:-0.37973, 525: 0.09081, 524: 0.05802, 523: 0.24224, 522: 0.10472, 521: 0.21952,
                520: 0.08164, 519:-0.11123, 518:-0.14859, 517:-0.10334, 516:-0.73129, 515: 0.22121, 514: 0.30850, 513:-0.25902,
                512: 0.02832, 511:-0.16751, 510: 0.37611, 509: 0.08351, 508:-0.07675, 507: 0.21783, 506: 0.00879, 505: 0.03134,
                504: 0.28042, 503:-0.20531, 502: 0.18326, 501: 0.07143, 500: 0.31378, 499:-0.08578, 498: 0.11781, 497: 0.15420,
                496: 0.23465, 495:-0.06551, 494: 0.36900, 493: 0.17406, 492:-0.10399, 491: 0.38239, 490: 0.02739, 489: 0.19866,
                488: 0.16292, 487:-0.03198, 486:-0.22434, 485:-0.06865, 484:-0.41293, 483:-0.23428, 482:-0.09922, 481: 0.20665,
                480:-0.20714, 479: 0.01496, 478: 0.15106, 477:-0.15939, 476: 0.05210, 475: 0.12209, 474: 0.05026, 473: 0.47498,
                472:-0.14150, 471: 0.24032, 470: 0.00884, 469: 0.22463, 468: 0.07883, 467: 0.09748, 466: 0.19099, 465:-0.21580,
                464:-0.41012, 463:-0.13685, 462:-0.69130, 461:-0.49707, 460: 0.56708, 459: 0.37429, 458:-0.25788, 457: 0.43178,
                456: 0.42703, 455:-0.00232, 454:-0.02868, 453:-0.19594, 452:-0.17331, 451: 0.11039, 450:-0.40812, 449:-0.36457,
                448: 0.28817, 447: 0.12077, 446: 0.13750, 445: 0.24044, 444: 0.30684, 443:-0.01591, 442:-0.00832, 441: 0.04607,
                440: 0.04799, 439:-0.13392, 438: 0.08795, 437: 0.06481, 436: 0.60142, 435:-0.14371, 434:-0.32561, 433: 0.24357,
                432:-0.14346, 431:-0.21885, 430:-0.13101, 429:-0.17018, 428:-0.01221, 427:-0.33354, 426:-0.60936, 425: 0.22769,
                424: 0.31681, 423:-0.31179, 422:-0.55005, 421:-0.03946, 420:-0.27889, 419: 0.04145, 418: 0.04599, 417: 0.01750,
                416:-0.14103, 415:-0.05740, 414:-0.19449, 413: 0.52515, 412:-0.16010, 411:-0.42980, 410: 0.09247, 409: 0.58800,
                408: 0.07464, 407:-0.09567, 406: 0.05155, 405: 0.08525, 404: 0.31657, 403:-0.26388, 402:-0.07609, 401:-0.18270,
                400: 0.34175, 399: 0.08498, 398:-0.62183, 397:-0.25209, 396: 0.38168, 395:-0.08947, 394: 0.09720, 393: 0.06730,
                392: 0.04781, 391: 0.30068, 390: 0.11618, 389: 0.15618, 388: 0.09567, 387:-0.32079, 386: 0.08535, 385:-0.24722,
                384:-0.23188, 383: 0.06894, 382:-0.12765, 381: 0.20214, 380: 0.23969, 379:-0.19352, 378: 0.15453, 377: 0.00290,
                376:-0.07582, 375:-0.02260, 374: 0.22083, 373: 0.16790, 372: 0.11297, 371: 0.07158, 370: 0.02773, 369: 0.21119,
                368:-0.19297, 367:-0.28593, 366:-0.05009, 365:-0.05420, 364:-0.08704, 363: 0.18430, 362: 0.37341, 361: 0.09752,
                360:-0.08965, 359: 0.69416, 358: 0.19057, 357:-0.42802, 356:-0.51148, 355: 0.26894, 354:-0.06806, 353: 0.03760,
                352:-0.00189, 351:-0.11622, 350: 0.22265, 349: 0.21677, 348: 0.08530, 347:-0.18921, 346: 0.23953, 345:-0.06465,
                344: 0.33446, 343:-0.13517, 342: 0.25181, 341:-0.04604, 340:-0.19718, 339: 0.17936, 338: 0.31543, 337: 0.38938,
                336:-0.40635, 335:-0.02939, 334:-0.24760, 333: 0.30883, 332: 0.19909, 331: 0.02177, 330:-0.29204, 329:-0.00186,
                328:-0.08329, 327: 0.01400, 326: 0.10005, 325: 0.27287, 324:-0.27875, 323:-0.23160, 322:-0.25191, 321: 0.01849,
                320: 0.06810, 319:-0.01950, 318:-0.00806, 317: 0.26611, 316:-0.06583, 315:-0.40927, 314: 0.60721, 313: 0.34219,
                312: 0.18865, 311:-0.04946, 310:-0.41295, 309:-0.26030, 308:-0.02737, 307:-0.17423, 306:-0.09516, 305:-0.38917,
                304: 0.13932, 303:-0.09758, 302: 0.26984, 301:-0.18968, 300:-0.08704, 299:-0.01068, 298: 0.00394, 297:-0.13001,
                296: 0.67293, 295:-0.24800, 294:-0.21391, 293:-0.77770, 292:-0.51783, 291: 0.21522, 290:-0.65233, 289:-0.21667,
                288:-0.46558, 287:-0.01197, 286: 0.53903, 285:-0.25380, 284:-0.11721, 283:-0.19870, 282: 0.14871, 281:-0.11580,
                280:-0.27303, 279:-0.09995, 278: 0.50543, 277:-0.01916, 276: 0.17032, 275: 0.11313, 274:-0.34437, 273: 0.02720,
                272:-0.29385, 271:-0.07518, 270: 0.01602, 269:-0.21707, 268: 0.18473, 267: 0.13134, 266:-0.27260, 265:-0.19629,
                264:-0.11563, 263: 0.05346, 262: 0.01013, 261: 0.13198, 260:-0.39109, 259:-0.23883, 258: 0.58870, 257:-0.12652,
                256:-0.59413, 255: 0.68115, 254:-0.21204, 253: 0.00551, 252: 0.02495, 251:-0.18744, 250: 0.02555, 249: 0.25670,
                248:-0.60826, 247:-0.28940, 246:-0.30386, 245: 0.06453, 244:-0.11850, 243:-0.21698, 242:-0.76734, 241:-1.24201,
                240:-0.06690, 239:-0.45256, 238:-0.19740, 237: 0.12666, 236: 0.24608, 235: 0.36658, 234:-0.26970, 233: 0.31875,
                232:-0.13514, 231:-0.77361, 230: 0.01953, 229: 0.00672, 228: 0.21156, 227:-0.24087, 226: 0.17751, 225:-0.22605,
                224: 0.17016, 223: 0.11339, 222: 0.44237, 221: 0.15435, 220:-0.55723, 219:-0.24768, 218:-0.20055, 217:-0.08340,
                216: 0.09574, 215:-0.06877, 214:-0.01774, 213:-0.21023, 212: 0.18737, 211:-0.18103, 210: 0.28017, 209:-0.24858,
                208: 0.24410, 207: 0.07135, 206:-0.01594, 205: 0.08561, 204:-0.30148, 203:-0.10412, 202: 0.19811, 201: 0.28056,
                200: 0.20556, 199: 0.26884, 198: 0.10258, 197: 0.19553, 196:-0.24923, 195: 0.25288, 194:-0.04621, 193: 0.26471,
                192:-0.15749, 191: 0.17970, 190: 0.19051, 189: 0.29429, 188:-0.14844, 187:-0.41714, 186:-0.29101, 185: 0.03564,
                184:-0.00063, 183:-0.26948, 182:-0.11025, 181: 0.39444, 180:-0.01630, 179: 0.06955, 178: 0.13669, 177:-0.01924,
                176:-0.37773, 175:-0.10698, 174: 0.08894, 173: 0.05774, 172: 0.10715, 171:-0.34304, 170:-0.29214, 169: 0.19289,
                168: 0.25010, 167:-0.78915, 166: 0.06359, 165: 0.07734, 164:-0.28473, 163:-0.24924, 162: 0.24338, 161: 0.02730,
                160:-0.24564, 159:-0.21202, 158:-0.18649, 157: 0.20179, 156: 0.34606, 155:-0.00561, 154: 0.04825, 153: 0.20237,
                152: 0.27509, 151: 0.18192, 150:-0.28308, 149:-0.09290, 148:-0.25986, 147:-0.01502, 146:-0.25284, 145: 0.06696,
                144:-0.11894, 143:-0.32491, 142: 0.02519, 141:-0.23695, 140:-0.47714, 139:-0.18847, 138: 0.28256, 137:-0.00454,
                136: 0.28915, 135:-0.04748, 134:-0.27333, 133:-0.06651, 132:-0.14920, 131: 0.12231, 130:-0.22406, 129:-0.36908,
                128: 0.38946, 127: 0.17512, 126: 0.78437, 125:-0.18086, 124: 0.31788, 123: 0.07317, 122: 0.15150, 121: 0.09347,
                120: 0.07667, 119: 0.22589, 118: 0.18211, 117: 0.26468, 116: 0.19884, 115:-0.24360, 114: 0.28399, 113:-0.28322,
                112:-0.18558, 111: 0.25550, 110: 0.03134, 109:-0.33869, 108: 0.13029, 107: 0.05015, 106:-0.19624, 105: 0.05679,
                104: 0.17401, 103: 0.03657, 102:-0.11230, 101:-0.29940, 100: 0.44462,  99:-0.05140,  98: 0.10479,  97:-0.22174,
                 96:-0.07872,  95:-0.47979,  94: 0.19587,  93:-0.53529,  92: 0.44065,  91: 0.19027,  90:-0.14778,  89:-0.13135,
                 88: 0.09126,  87: 0.32729,  86: 0.02026,  85:-0.03774,  84:-0.04430,  83:-0.01930,  82:-0.03157,  81:-0.45708,
                 80: 0.22014,  79: 0.50022,  78:-0.03620,  77:-0.28786,  76:-0.28591,  75: 0.00681,  74: 0.09079,  73:-0.06798,
                 72: 0.42184,  71:-0.36488,  70:-0.22580,  69: 0.01923,  68: 0.17692,  67: 0.23496,  66: 0.21558,  65:-0.22103,
                 64:-0.01921,  63: 0.40915,  62: 0.44356,  61:-0.34965,  60: 0.20015,  59: 0.01967,  58: 0.08115,  57:-0.43038,
                 56:-0.14731,  55: 0.30834,  54:-0.16837,  53: 1.67721,  52: 0.12212,  51: 0.01365,  50: 0.04612,  49: 0.68061,
                 48:-0.34494,  47:-0.26541,  46: 0.24129,  45: 0.15752,  44:-0.14276,  43:-0.15643,  42:-0.01458,  41: 0.34536,
                 40: 0.27999,  39:-0.07761,  38: 0.18133,  37:-0.25250,  36:-0.08825,  35:-0.00245,  34: 0.06167,  33: 0.29581,
                 32: 0.22277,  31:-0.15134,  30:-0.20759,  29: 0.15191,  28:-0.19313,  27: 0.24754,  26:-0.14901,  25:-0.30962,
                 24: 0.05922,  23: 0.05707,  22: 0.09727,  21:-0.02486,  20:-0.21034,  19:-0.15563,  18: 0.07045,  17: 0.00760,
                 16: 0.38407,  15:-0.24535,  14: 0.27101,  13:-0.22450,  12:-0.14445,  11: 0.49783,  10:-0.38172,   9: 0.25411,
                  8:-0.16554,   7:-0.05597,   6: 0.15872,   5:-0.31868,   4:-0.02981,   3:-0.31219,   2:-0.37469,   1: 0.01920
   603 |      | 600:-0.20863, 599: 0.21378, 598:-0.23093, 597: 0.21673, 596: 0.09911, 595:-0.26871, 594:-0.19775, 593: 0.20607,
                592:-0.10191, 591:-0.06818, 590: 0.13995, 589:-0.19603, 588:-0.11377, 587:-0.15540, 586: 0.19511, 585:-0.16100,
                584:-0.08435, 583:-0.05523, 582: 0.19449, 581:-0.23351, 580: 0.10176, 579:-0.28312, 578: 0.11664, 577: 0.09937,
                576:-0.21662, 575:-0.05099, 574: 0.03489, 573: 0.01650, 572:-0.24430, 571:-0.22789, 570: 0.27464, 569:-0.17616,
                568:-0.28997, 567:-0.03240, 566:-0.20683, 565: 0.19439, 564:-0.24143, 563: 0.15646, 562: 0.02187, 561:-0.29206,
                560: 0.06151, 559: 0.36583, 558: 0.02047, 557: 0.10910, 556:-0.11875, 555: 0.20465, 554: 0.23459, 553: 0.07644,
                552: 0.03374, 551: 0.00109, 550: 0.33390, 549:-0.19293, 548: 0.21521, 547: 0.05782, 546:-0.26838, 545: 0.12150,
                544:-0.27263, 543: 0.27470, 542:-0.14995, 541: 0.28960, 540: 0.17197, 539:-0.06710, 538: 0.00891, 537:-0.26726,
                536: 0.17134, 535: 0.19013, 534:-0.13199, 533:-0.29975, 532: 0.24250, 531:-0.21624, 530: 0.26780, 529:-0.03892,
                528: 0.00309, 527:-0.11610, 526:-0.10088, 525:-0.21701, 524:-0.02208, 523: 0.05400, 522:-0.20179, 521: 0.24025,
                520: 0.20974, 519: 0.20573, 518:-0.09273, 517:-0.23229, 516:-0.00859, 515:-0.12810, 514:-0.06859, 513: 0.00884,
                512:-0.16754, 511:-0.09939, 510:-0.35276, 509: 0.29452, 508:-0.07783, 507:-0.25329, 506: 0.02727, 505:-0.00998,
                504:-0.02694, 503: 0.18806, 502:-0.27279, 501:-0.37833, 500: 0.20332, 499:-0.15005, 498:-0.27051, 497:-0.18027,
                496:-0.16270, 495:-0.09128, 494:-0.11059, 493: 0.06209, 492:-0.20942, 491:-0.08068, 490:-0.27520, 489: 0.00060,
                488:-0.20044, 487: 0.01225, 486: 0.20669, 485:-0.29590, 484:-0.01744, 483: 0.30907, 482: 0.05278, 481:-0.09171,
                480:-0.23205, 479: 0.03843, 478:-0.33086, 477:-0.07365, 476: 0.37380, 475:-0.08989, 474: 0.29464, 473:-0.37761,
                472: 0.18537, 471:-0.38792, 470: 0.04186, 469: 0.00522, 468:-0.09907, 467:-0.01883, 466: 0.02589, 465: 0.33839,
                464: 0.00543, 463: 0.27574, 462: 0.46272, 461: 0.13851, 460: 0.11377, 459:-0.13997, 458: 0.11709, 457:-0.43441,
                456: 0.13791, 455:-0.33149, 454:-0.28856, 453:-0.22189, 452:-0.17180, 451:-0.19261, 450: 0.00697, 449:-0.08197,
                448: 0.02652, 447:-0.10070, 446: 0.02899, 445:-0.27894, 444:-0.19281, 443: 0.08897, 442: 0.01396, 441:-0.14022,
                440: 0.23809, 439:-0.04453, 438: 0.01241, 437:-0.09519, 436:-0.27997, 435:-0.05602, 434:-0.05223, 433:-0.04331,
                432:-0.08780, 431: 0.33186, 430:-0.00154, 429: 0.32400, 428:-0.33577, 427:-0.16210, 426:-0.28356, 425: 0.25338,
                424: 0.15790, 423:-0.11401, 422: 0.28409, 421: 0.07669, 420: 0.14995, 419: 0.27064, 418: 0.18014, 417:-0.13253,
                416:-0.16355, 415:-0.10439, 414:-0.16438, 413:-0.19546, 412:-0.25404, 411: 0.12275, 410: 0.14168, 409:-0.15136,
                408: 0.16833, 407:-0.08162, 406: 0.14750, 405:-0.14781, 404:-0.00125, 403: 0.00840, 402: 0.00728, 401: 0.00677,
                400: 0.07186, 399:-0.05823, 398: 0.54673, 397: 0.07889, 396:-0.23156, 395:-0.07470, 394: 0.23562, 393:-0.16593,
                392: 0.02575, 391:-0.10701, 390:-0.09124, 389:-0.01944, 388:-0.20434, 387:-0.09416, 386: 0.02692, 385: 0.26780,
                384: 0.08967, 383: 0.08114, 382: 0.09929, 381: 0.23883, 380: 0.06969, 379: 0.00193, 378:-0.03631, 377:-0.25366,
                376:-0.19266, 375:-0.00258, 374: 0.06657, 373: 0.18956, 372: 0.17706, 371: 0.16717, 370: 0.25180, 369:-0.18975,
                368:-0.06887, 367: 0.17419, 366: 0.08929, 365: 0.16932, 364:-0.18085, 363:-0.11645, 362: 0.12901, 361:-0.34281,
                360: 0.17435, 359:-0.12665, 358: 0.01604, 357: 0.11487, 356: 0.58856, 355:-0.14565, 354: 0.15210, 353: 0.20260,
                352: 0.26357, 351:-0.02617, 350:-0.05234, 349: 0.12552, 348: 0.04590, 347:-0.16194, 346:-0.17893, 345: 0.11705,
                344: 0.06647, 343: 0.00155, 342:-0.00368, 341:-0.28995, 340: 0.09358, 339:-0.30992, 338:-0.36591, 337:-0.21622,
                336: 0.22750, 335:-0.12885, 334: 0.28133, 333:-0.27186, 332:-0.28891, 331:-0.12516, 330: 0.14913, 329: 0.15403,
                328: 0.12332, 327:-0.08229, 326:-0.03071, 325: 0.21841, 324: 0.11171, 323: 0.20090, 322:-0.17550, 321:-0.16824,
                320:-0.14849, 319:-0.16909, 318:-0.19431, 317: 0.23184, 316:-0.07088, 315: 0.12364, 314:-0.10671, 313: 0.02758,
                312:-0.11704, 311: 0.22943, 310:-0.11750, 309: 0.09617, 308: 0.23214, 307:-0.06002, 306: 0.15585, 305:-0.23159,
                304:-0.14860, 303: 0.23668, 302:-0.05769, 301: 0.03517, 300:-0.03959, 299:-0.45173, 298: 0.09837, 297:-0.05461,
                296:-0.23313, 295:-0.22827, 294:-0.17858, 293: 0.18476, 292: 0.28799, 291: 0.18116, 290: 0.02273, 289: 0.13358,
                288:-0.06046, 287:-0.36231, 286:-0.37641, 285: 0.04177, 284: 0.27008, 283:-0.10298, 282:-0.30044, 281: 0.35878,
                280: 0.22123, 279:-0.05174, 278: 0.16959, 277: 0.10452, 276:-0.09863, 275:-0.26093, 274:-0.05651, 273: 0.01602,
                272: 0.05049, 271: 0.11411, 270: 0.25422, 269: 0.16356, 268: 0.13075, 267:-0.03363, 266: 0.03958, 265:-0.11467,
                264: 0.05600, 263: 0.22883, 262:-0.01072, 261: 0.26288, 260: 0.07868, 259:-0.21332, 258:-0.10193, 257: 0.29656,
                256: 0.17439, 255:-0.21409, 254: 0.20659, 253:-0.19983, 252: 0.35656, 251: 0.26275, 250:-0.27628, 249: 0.28773,
                248: 0.29305, 247: 0.08517, 246: 0.19474, 245: 0.27349, 244: 0.05656, 243: 0.09195, 242: 0.35914, 241: 0.21096,
                240:-0.29279, 239:-0.13804, 238: 0.10764, 237: 0.23172, 236:-0.09779, 235: 0.07648, 234:-0.29596, 233:-0.06233,
                232:-0.22745, 231: 0.13662, 230:-0.30759, 229: 0.10163, 228: 0.00896, 227:-0.08036, 226: 0.13468, 225: 0.10444,
                224:-0.24793, 223:-0.19311, 222:-0.09564, 221:-0.10130, 220: 0.10627, 219:-0.22313, 218: 0.08290, 217:-0.16188,
                216:-0.02102, 215:-0.25893, 214:-0.19708, 213: 0.18124, 212: 0.00037, 211: 0.12203, 210:-0.03398, 209:-0.04519,
                208: 0.12957, 207:-0.09505, 206:-0.31094, 205: 0.04991, 204:-0.14912, 203:-0.06113, 202: 0.13576, 201: 0.25802,
                200:-0.13927, 199: 0.13143, 198:-0.01527, 197:-0.19488, 196:-0.02541, 195:-0.02891, 194:-0.13119, 193:-0.19623,
                192:-0.29029, 191:-0.33738, 190: 0.20518, 189:-0.06521, 188:-0.29694, 187: 0.05170, 186:-0.00081, 185: 0.29350,
                184:-0.11992, 183: 0.25001, 182:-0.14311, 181:-0.25407, 180: 0.03451, 179: 0.30330, 178: 0.21769, 177:-0.22518,
                176: 0.02799, 175:-0.13786, 174: 0.13280, 173: 0.05536, 172:-0.20266, 171: 0.02415, 170: 0.01548, 169:-0.00979,
                168:-0.10867, 167: 0.04309, 166: 0.01421, 165: 0.11882, 164:-0.26493, 163: 0.04993, 162: 0.05800, 161: 0.01427,
                160:-0.23042, 159: 0.21461, 158:-0.01752, 157:-0.17920, 156: 0.22456, 155: 0.14923, 154:-0.32063, 153: 0.12642,
                152: 0.21377, 151: 0.14002, 150: 0.11776, 149:-0.24012, 148:-0.24994, 147: 0.31644, 146:-0.13206, 145: 0.13339,
                144:-0.30623, 143: 0.29582, 142: 0.03350, 141:-0.15473, 140:-0.07465, 139:-0.18858, 138:-0.26255, 137:-0.28617,
                136: 0.23517, 135:-0.01486, 134: 0.30560, 133: 0.25209, 132: 0.08773, 131: 0.27368, 130: 0.28540, 129: 0.22422,
                128:-0.00728, 127:-0.11940, 126:-0.06928, 125:-0.04596, 124: 0.01836, 123: 0.10387, 122: 0.22275, 121: 0.17003,
                120:-0.23860, 119:-0.10957, 118:-0.27931, 117:-0.19768, 116: 0.13116, 115: 0.11891, 114:-0.00978, 113:-0.24387,
                112:-0.10972, 111:-0.00942, 110:-0.26813, 109:-0.16770, 108: 0.14861, 107:-0.27550, 106: 0.15410, 105:-0.28269,
                104: 0.18714, 103: 0.28639, 102: 0.15770, 101: 0.25691, 100:-0.20080,  99:-0.21853,  98:-0.23197,  97: 0.05765,
                 96:-0.22551,  95:-0.07183,  94: 0.07209,  93:-0.14176,  92:-0.32462,  91:-0.03817,  90:-0.00702,  89: 0.23842,
                 88: 0.11771,  87:-0.23915,  86: 0.24832,  85: 0.23100,  84:-0.22435,  83:-0.33725,  82:-0.03913,  81: 0.03417,
                 80: 0.07195,  79:-0.03718,  78:-0.19284,  77: 0.03200,  76:-0.05988,  75: 0.27732,  74:-0.14343,  73: 0.09370,
                 72:-0.06791,  71:-0.14544,  70: 0.19328,  69:-0.28306,  68:-0.10919,  67:-0.10714,  66:-0.15379,  65: 0.30097,
                 64: 0.12482,  63:-0.25009,  62: 0.02204,  61: 0.37830,  60:-0.11041,  59:-0.07392,  58: 0.19811,  57:-0.03583,
                 56: 0.28940,  55: 0.04501,  54: 0.21000,  53:-0.78318,  52:-0.20027,  51:-0.28176,  50:-0.17073,  49:-0.21895,
                 48:-0.23690,  47: 0.06410,  46: 0.18246,  45:-0.10641,  44:-0.17759,  43: 0.19696,  42:-0.02115,  41:-0.26963,
                 40:-0.02475,  39: 0.03391,  38:-0.07740,  37:-0.13899,  36: 0.23175,  35:-0.23327,  34:-0.30496,  33:-0.10238,
                 32: 0.10242,  31: 0.19624,  30: 0.25391,  29:-0.00805,  28:-0.25946,  27: 0.08612,  26:-0.11903,  25: 0.03402,
                 24:-0.35241,  23:-0.13219,  22: 0.08578,  21:-0.03502,  20:-0.24982,  19:-0.26922,  18: 0.05535,  17: 0.06093,
                 16:-0.07677,  15:-0.10522,  14: 0.18509,  13:-0.19810,  12:-0.29430,  11:-0.19250,  10: 0.00239,   9:-0.18816,
                  8:-0.17389,   7:-0.11127,   6:-0.03991,   5:-0.15409,   4: 0.19237,   3:-0.11094,   2:-0.02868,   1: 0.10692
   604 |      | 600: 0.43595, 599:-0.00454, 598: 0.01227, 597:-0.19505, 596:-0.17983, 595:-0.23581, 594: 0.47385, 593: 0.00555,
                592: 0.10079, 591:-0.54115, 590:-0.01275, 589: 0.04598, 588:-0.48818, 587:-0.17583, 586:-0.00645, 585:-0.12591,
                584:-0.00567, 583: 0.34518, 582: 0.29101, 581:-0.96583, 580:-0.46490, 579:-0.34448, 578: 0.00628, 577:-0.01532,
                576:-0.27284, 575: 0.05076, 574: 0.03997, 573:-0.38792, 572:-0.22697, 571: 0.10862, 570: 1.00784, 569: 0.01815,
                568:-0.26435, 567: 0.32154, 566:-0.47171, 565: 0.35652, 564:-0.36119, 563:-0.06158, 562:-0.09843, 561:-0.11419,
                560: 0.04102, 559:-0.09674, 558:-0.16601, 557:-0.95275, 556:-0.23898, 555: 0.22697, 554: 0.09989, 553:-0.12859,
                552: 0.07510, 551:-0.82833, 550: 0.07138, 549:-0.16004, 548: 0.32620, 547: 0.39469, 546:-0.99661, 545:-0.59935,
                544:-0.28342, 543:-0.02321, 542:-0.32760, 541: 0.18615, 540:-0.05471, 539:-0.30666, 538:-0.25846, 537:-0.57771,
                536:-0.58749, 535: 0.07737, 534: 0.13604, 533: 0.21245, 532: 0.10074, 531: 0.01300, 530: 0.54468, 529: 0.61931,
                528: 0.17351, 527:-0.08536, 526: 0.09407, 525: 0.05287, 524: 0.08295, 523: 0.00649, 522:-0.02995, 521:-0.15735,
                520: 0.08633, 519:-0.08804, 518: 0.01625, 517: 0.00327, 516: 0.39748, 515:-0.15245, 514:-0.45754, 513:-0.42052,
                512:-0.58627, 511:-0.12152, 510:-0.64693, 509:-0.01627, 508:-0.14595, 507:-0.16911, 506: 0.06844, 505:-0.01834,
                504:-0.26572, 503: 0.45587, 502:-0.40755, 501:-0.33350, 500:-0.13922, 499:-0.29933, 498:-0.08269, 497: 0.22938,
                496:-0.08301, 495: 0.06593, 494: 0.10270, 493:-0.35721, 492:-0.26222, 491:-0.27249, 490:-0.51737, 489:-0.22576,
                488:-0.24580, 487: 0.34616, 486:-0.09028, 485:-0.70395, 484: 0.44880, 483:-0.03095, 482:-0.14913, 481:-0.28748,
                480:-0.00288, 479: 0.13632, 478:-0.22211, 477:-0.01899, 476: 0.21287, 475: 0.22223, 474:-0.01871, 473:-0.36065,
                472: 0.17290, 471:-0.50057, 470: 0.23353, 469:-0.69688, 468: 0.56591, 467: 0.27631, 466:-0.52089, 465: 0.38836,
                464:-0.05611, 463: 0.14367, 462: 0.35953, 461: 0.73990, 460:-0.41294, 459:-0.17388, 458: 0.44442, 457:-0.78351,
                456:-0.50180, 455:-0.35042, 454:-0.07996, 453:-0.27515, 452: 0.08810, 451: 0.16796, 450: 0.74819, 449: 0.72753,
                448:-0.37647, 447:-0.60993, 446: 0.14913, 445:-0.52746, 444:-0.52703, 443:-0.47273, 442:-0.17902, 441: 0.14989,
                440: 0.53993, 439: 0.05866, 438: 0.48794, 437:-0.07063, 436:-0.31200, 435: 0.00170, 434: 0.53762, 433:-0.41611,
                432:-0.21965, 431: 0.30368, 430:-0.45414, 429: 0.59889, 428: 0.01460, 427:-0.19110, 426:-1.89629, 425:-0.36979,
                424: 0.09048, 423:-0.30884, 422: 0.23003, 421:-0.00189, 420: 0.27589, 419:-0.13415, 418:-0.28782, 417:-0.08509,
                416:-0.19342, 415: 0.39726, 414:-0.10359, 413:-0.53806, 412: 0.20724, 411: 0.70722, 410:-0.16961, 409:-0.48183,
                408: 0.01205, 407:-0.28626, 406:-0.30869, 405: 0.15449, 404: 0.11080, 403: 0.60247, 402: 0.02494, 401: 0.08973,
                400:-0.23208, 399:-0.10341, 398: 0.67811, 397: 0.59650, 396:-0.78408, 395: 0.88021, 394: 0.06971, 393:-0.25285,
                392: 0.09683, 391:-0.04159, 390:-0.16989, 389:-0.26599, 388: 0.21489, 387:-0.16797, 386: 0.48327, 385: 0.23208,
                384: 0.33834, 383:-0.20954, 382: 0.60749, 381:-0.10159, 380:-0.31056, 379:-0.15848, 378: 0.07103, 377:-0.35811,
                376:-0.01389, 375:-0.22998, 374: 0.00876, 373: 0.54452, 372:-0.13582, 371:-0.28154, 370:-0.31574, 369:-0.24928,
                368: 0.02945, 367: 0.43039, 366:-0.39615, 365:-0.11236, 364: 0.02123, 363:-0.17480, 362:-0.40426, 361:-0.22781,
                360: 0.21146, 359:-1.22897, 358: 0.14604, 357: 0.34836, 356: 1.24333, 355:-0.27716, 354: 0.14258, 353:-0.15329,
                352:-0.18950, 351:-0.26401, 350: 0.27107, 349:-0.19462, 348:-0.42418, 347: 0.39947, 346:-0.18091, 345: 0.52284,
                344:-0.12592, 343:-0.12245, 342:-0.27280, 341: 0.18457, 340: 0.03986, 339:-0.97842, 338:-0.11840, 337:-0.44413,
                336: 0.43415, 335:-0.41769, 334: 0.39241, 333:-0.25825, 332: 0.18839, 331: 0.26569, 330:-0.17102, 329:-0.12434,
                328: 0.12550, 327:-0.23459, 326: 0.29786, 325: 0.05287, 324:-0.24008, 323:-0.00448, 322:-0.03892, 321:-0.15674,
                320: 0.17040, 319:-0.14600, 318:-0.18918, 317: 0.20109, 316:-1.04211, 315: 0.33242, 314:-1.14152, 313: 0.03567,
                312:-0.55265, 311: 0.27965, 310: 0.15824, 309:-0.00478, 308: 0.13780, 307:-0.25872, 306: 0.47049, 305: 0.13044,
                304: 0.44467, 303: 0.37339, 302:-0.33675, 301:-0.29191, 300:-0.28594, 299:-0.15594, 298:-0.33153, 297: 0.26523,
                296:-1.15277, 295: 0.50823, 294: 0.28218, 293: 1.41441, 292: 0.65745, 291:-0.31525, 290: 0.42700, 289: 0.03780,
                288: 0.74428, 287:-0.45667, 286:-0.66920, 285: 0.31086, 284: 0.40980, 283:-0.31339, 282:-0.30482, 281: 0.52384,
                280: 0.14185, 279: 0.05895, 278:-0.45645, 277: 0.00071, 276:-0.25286, 275:-1.20204, 274: 0.62521, 273:-0.14530,
                272: 0.26749, 271: 0.25944, 270: 0.21723, 269:-0.24586, 268:-0.27392, 267:-0.48475, 266: 0.32810, 265:-0.13632,
                264: 0.17568, 263: 0.09733, 262:-0.10474, 261:-0.14175, 260: 0.59269, 259:-0.03744, 258:-0.95238, 257: 0.14220,
                256: 0.67726, 255:-1.01409, 254: 0.21742, 253: 0.48818, 252: 0.39539, 251:-0.03681, 250:-0.43924, 249: 0.16496,
                248: 0.96784, 247: 0.17161, 246: 0.29594, 245:-0.02426, 244:-0.48532, 243:-0.01695, 242: 0.98089, 241: 2.48999,
                240: 0.13084, 239: 0.45191, 238: 0.27527, 237:-0.19954, 236: 0.05901, 235: 0.01161, 234: 0.19267, 233:-1.29326,
                232: 0.28281, 231: 1.36233, 230:-0.27785, 229: 0.17087, 228:-0.76602, 227:-0.04152, 226: 0.04193, 225: 0.33866,
                224:-0.09438, 223: 0.13379, 222:-0.04507, 221: 0.05018, 220: 0.62162, 219:-0.06264, 218: 0.07961, 217: 0.06025,
                216:-0.05015, 215:-0.07882, 214:-0.27092, 213:-0.16479, 212: 0.14959, 211:-0.14565, 210:-0.00321, 209:-0.05453,
                208:-0.17698, 207:-0.73942, 206:-0.35141, 205: 0.09777, 204: 0.45179, 203: 0.45632, 202:-0.01184, 201:-0.17238,
                200: 0.12767, 199:-0.29250, 198: 0.28978, 197:-0.08040, 196:-0.28179, 195:-0.09738, 194: 0.10811, 193: 0.02707,
                192:-0.22834, 191:-0.40177, 190: 0.16711, 189:-0.23378, 188: 0.01227, 187: 0.40694, 186: 0.34851, 185:-0.20632,
                184:-0.50394, 183: 0.38312, 182:-0.03649, 181:-0.51842, 180: 0.29892, 179:-0.13469, 178:-0.24135, 177:-0.28476,
                176: 0.47302, 175:-0.32144, 174:-0.22917, 173:-0.08915, 172: 0.09319, 171:-0.39928, 170: 0.45033, 169: 0.16842,
                168:-0.17402, 167: 0.49343, 166:-0.70895, 165:-0.39885, 164:-0.52635, 163: 0.07194, 162:-0.08493, 161: 0.15803,
                160:-0.27394, 159: 0.15310, 158:-0.05323, 157:-0.38160, 156:-0.19141, 155:-0.63489, 154:-0.15156, 153:-0.02622,
                152:-0.05302, 151:-0.28982, 150:-0.24595, 149: 0.20919, 148:-0.20775, 147: 0.43164, 146: 0.31484, 145:-0.38205,
                144: 0.15922, 143: 0.37402, 142: 0.26265, 141:-0.91392, 140: 0.20788, 139: 0.26162, 138: 0.25424, 137:-0.02190,
                136: 0.00365, 135:-0.26357, 134:-0.06359, 133:-0.01645, 132:-0.12152, 131: 0.15408, 130: 0.47015, 129: 0.07045,
                128:-0.07874, 127: 0.02497, 126:-1.19543, 125: 0.63187, 124:-0.33465, 123: 0.18242, 122: 0.25584, 121: 0.03028,
                120:-0.18015, 119:-0.02908, 118:-0.60898, 117: 0.14611, 116:-0.45737, 115: 0.40402, 114:-0.39863, 113: 0.41345,
                112: 0.44189, 111:-0.57989, 110:-0.01825, 109:-0.13462, 108:-0.06644, 107:-0.20147, 106:-0.27157, 105: 0.09221,
                104: 0.02906, 103: 0.05084, 102:-0.21476, 101:-0.00323, 100:-0.64107,  99: 0.45427,  98:-0.21581,  97: 0.09538,
                 96:-0.18076,  95: 0.23136,  94:-0.86694,  93: 0.62522,  92:-0.69128,  91: 0.19252,  90: 0.20722,  89: 0.01321,
                 88: 0.20736,  87:-0.95666,  86: 0.50788,  85:-0.45904,  84: 0.14993,  83: 0.07771,  82: 0.24850,  81: 0.51666,
                 80:-0.14116,  79:-0.71664,  78: 0.40200,  77: 0.28557,  76: 0.31152,  75: 0.02346,  74:-0.21334,  73: 0.26264,
                 72:-0.43136,  71: 0.45390,  70:-0.29221,  69: 0.32624,  68:-0.25167,  67: 0.18729,  66: 0.00485,  65: 0.86559,
                 64:-0.02324,  63:-0.31090,  62:-0.23006,  61: 0.96468,  60:-0.09346,  59:-0.16952,  58:-0.15155,  57: 0.84033,
                 56: 0.62627,  55:-0.50518,  54: 0.37338,  53:-4.23898,  52: 0.09926,  51:-0.49634,  50:-0.03217,  49:-0.54935,
                 48: 0.51720,  47:-0.13281,  46: 0.19723,  45: 0.06264,  44:-0.05335,  43: 0.35171,  42:-0.34330,  41:-0.48108,
                 40: 0.04511,  39: 0.15645,  38:-0.21014,  37:-0.29784,  36: 0.08413,  35:-0.23893,  34:-0.35204,  33:-0.04780,
                 32:-0.26275,  31:-0.24951,  30:-0.22201,  29: 0.10042,  28: 0.01165,  27:-0.72590,  26: 0.21174,  25: 0.07463,
                 24:-0.27968,  23:-0.02771,  22:-0.21091,  21:-0.48833,  20:-0.14193,  19:-0.40699,  18: 0.06631,  17: 0.05779,
                 16:-0.71998,  15:-0.15625,  14:-0.60367,  13: 0.31168,  12:-0.25808,  11:-0.95552,  10: 0.05949,   9: 0.12759,
                  8:-0.03761,   7: 0.16631,   6: 0.22399,   5:-0.03790,   4: 0.49273,   3: 0.16559,   2:-0.10039,   1: 0.02486
   605 |      | 600:-0.32901, 599:-0.20505, 598: 0.03426, 597:-0.02329, 596:-0.25324, 595: 0.22039, 594:-0.08314, 593: 0.24280,
                592:-0.06871, 591: 0.19590, 590: 0.17729, 589: 0.13549, 588:-0.22251, 587:-0.09548, 586:-0.08650, 585:-0.13550,
                584: 0.00379, 583:-0.33000, 582: 0.08882, 581:-0.04712, 580: 0.34902, 579: 0.41748, 578: 0.12136, 577:-0.19793,
                576: 0.07300, 575:-0.10725, 574:-0.25487, 573: 0.01133, 572: 0.36015, 571:-0.15085, 570:-0.48099, 569:-0.06718,
                568:-0.24672, 567:-0.03002, 566: 0.09331, 565: 0.17252, 564: 0.21595, 563: 0.23324, 562: 0.10754, 561: 0.18805,
                560:-0.25063, 559:-0.37146, 558: 0.29384, 557: 0.02669, 556: 0.06491, 555:-0.20653, 554:-0.04632, 553: 0.15222,
                552:-0.10810, 551: 0.49974, 550:-0.23594, 549: 0.25607, 548: 0.37907, 547: 0.22339, 546: 0.43464, 545: 0.06852,
                544: 0.38206, 543:-0.32527, 542: 0.24797, 541: 0.01122, 540: 0.09092, 539:-0.26130, 538: 0.02455, 537: 0.46487,
                536:-0.01704, 535: 0.15221, 534:-0.25730, 533: 0.26293, 532: 0.18166, 531: 0.25888, 530:-0.22925, 529: 0.08345,
                528: 0.15334, 527: 0.17251, 526:-0.30741, 525:-0.03522, 524: 0.02072, 523: 0.29387, 522: 0.19316, 521:-0.12227,
                520: 0.12130, 519: 0.19589, 518: 0.20036, 517:-0.18279, 516:-0.12236, 515: 0.09839, 514: 0.07997, 513:-0.03236,
                512:-0.33484, 511: 0.02707, 510: 0.46171, 509:-0.46403, 508:-0.02960, 507:-0.43948, 506:-0.34885, 505:-0.33046,
                504:-0.41336, 503:-0.77513, 502:-0.31926, 501:-0.03536, 500: 0.25643, 499:-0.22968, 498:-0.27131, 497: 0.05796,
                496:-0.01356, 495:-0.01239, 494:-0.11580, 493: 0.24628, 492: 0.27590, 491: 0.07271, 490:-0.01336, 489: 0.04921,
                488:-0.11714, 487: 0.01754, 486: 0.16497, 485:-0.16841, 484:-0.19586, 483: 0.11162, 482:-0.18071, 481: 0.29592,
                480: 0.22757, 479: 0.07272, 478: 0.37660, 477: 0.09794, 476: 0.05170, 475:-0.20540, 474: 0.00447, 473: 0.40152,
                472: 0.48435, 471: 0.22464, 470:-0.14856, 469: 0.22416, 468:-0.25205, 467: 0.06519, 466:-0.17230, 465:-0.26762,
                464:-0.26250, 463: 0.12502, 462:-0.35456, 461:-0.51144, 460: 0.03885, 459: 0.17529, 458: 0.02108, 457: 0.33443,
                456: 0.47065, 455:-0.10627, 454:-0.08949, 453:-0.11765, 452:-0.11463, 451: 0.25247, 450: 0.10049, 449:-0.10399,
                448: 0.14447, 447: 0.16968, 446:-0.23537, 445: 0.43178, 444: 0.11907, 443: 0.27883, 442:-0.10762, 441:-0.09433,
                440:-0.32796, 439:-0.35972, 438:-0.38080, 437:-0.14401, 436: 0.40275, 435:-0.18055, 434:-0.31011, 433: 0.44213,
                432: 0.10525, 431:-0.03075, 430: 0.38475, 429:-0.40923, 428: 0.13909, 427:-0.00420, 426:-0.28974, 425:-0.15888,
                424: 0.63387, 423:-0.20712, 422:-0.37254, 421:-0.39954, 420:-0.23141, 419:-0.22303, 418: 0.13480, 417:-0.27742,
                416: 0.18250, 415:-0.02993, 414:-0.04566, 413: 0.56137, 412: 0.06471, 411:-0.27712, 410: 0.09311, 409: 0.08268,
                408: 0.17735, 407:-0.05491, 406:-0.28148, 405: 0.13570, 404:-0.37512, 403: 0.18594, 402:-0.11706, 401:-0.24715,
                400:-0.04354, 399:-0.13444, 398:-0.57247, 397:-0.09582, 396: 0.07469, 395: 0.00195, 394: 0.07746, 393: 0.18315,
                392:-0.07104, 391: 0.27859, 390:-0.23449, 389: 0.00568, 388: 0.17784, 387:-0.11555, 386:-0.07683, 385: 0.25961,
                384:-0.26147, 383: 0.20808, 382:-0.13180, 381: 0.00097, 380: 0.00014, 379: 0.22099, 378:-0.10402, 377: 0.35027,
                376:-0.04872, 375:-0.05759, 374:-0.10531, 373:-0.32392, 372: 0.22701, 371:-0.15486, 370: 0.01020, 369: 0.30405,
                368: 0.22647, 367: 0.18415, 366:-0.09904, 365: 0.04266, 364: 0.24830, 363:-0.17874, 362: 0.09354, 361:-0.00262,
                360: 0.11137, 359: 0.16644, 358: 0.13778, 357:-0.10013, 356:-0.45553, 355: 0.05888, 354:-0.16912, 353:-0.07327,
                352:-0.08029, 351:-0.15828, 350:-0.19164, 349: 0.03242, 348: 0.13536, 347: 0.04412, 346:-0.04761, 345:-0.15623,
                344: 0.07605, 343:-0.16057, 342: 0.12483, 341: 0.15619, 340:-0.28249, 339: 0.49599, 338: 0.25786, 337: 0.12555,
                336: 0.07824, 335: 0.18339, 334: 0.04877, 333: 0.05908, 332:-0.21265, 331:-0.05555, 330:-0.23856, 329: 0.28631,
                328:-0.14611, 327: 0.10530, 326:-0.00460, 325:-0.02502, 324: 0.14159, 323:-0.21405, 322:-0.29432, 321:-0.14651,
                320: 0.05733, 319: 0.02302, 318: 0.13588, 317: 0.16717, 316: 0.15214, 315:-0.32689, 314: 0.19415, 313: 0.24979,
                312: 0.24579, 311:-0.13405, 310:-0.33221, 309:-0.27822, 308:-0.07269, 307: 0.02739, 306: 0.17205, 305:-0.04820,
                304: 0.01019, 303:-0.03461, 302: 0.02539, 301: 0.23290, 300:-0.08305, 299: 0.09336, 298:-0.08987, 297: 0.13111,
                296: 0.31574, 295:-0.36948, 294:-0.24719, 293:-0.21805, 292:-0.28215, 291: 0.15411, 290:-0.06970, 289:-0.19370,
                288:-0.07843, 287: 0.42215, 286: 0.07878, 285:-0.23104, 284:-0.26433, 283:-0.22623, 282: 0.32603, 281:-0.05373,
                280: 0.06822, 279:-0.09293, 278:-0.10603, 277:-0.38168, 276: 0.42557, 275:-0.06534, 274:-0.39113, 273: 0.14655,
                272: 0.00782, 271: 0.05031, 270:-0.11980, 269: 0.03598, 268: 0.35576, 267: 0.25536, 266:-0.22409, 265: 0.03411,
                264:-0.29157, 263: 0.11383, 262:-0.04536, 261:-0.04744, 260:-0.22738, 259: 0.14478, 258: 0.40417, 257:-0.11771,
                256:-0.23183, 255: 0.49590, 254: 0.02978, 253:-0.34416, 252:-0.36006, 251:-0.03785, 250: 0.21325, 249: 0.17068,
                248:-0.31578, 247:-0.01812, 246:-0.02672, 245:-0.12386, 244: 0.21984, 243: 0.12418, 242:-0.37055, 241:-0.84740,
                240: 0.18931, 239: 0.00065, 238:-0.22013, 237: 0.07617, 236: 0.03472, 235: 0.24355, 234: 0.24192, 233: 0.27602,
                232:-0.06698, 231:-0.74109, 230: 0.01075, 229:-0.08024, 228:-0.04741, 227: 0.27760, 226:-0.02562, 225: 0.20316,
                224:-0.02452, 223:-0.35036, 222: 0.04349, 221:-0.53228, 220:-0.00956, 219: 0.04814, 218:-0.29294, 217: 0.20631,
                216: 0.15673, 215:-0.16583, 214: 0.03952, 213:-0.29387, 212: 0.17831, 211:-0.15417, 210: 0.06804, 209:-0.00574,
                208: 0.06693, 207: 0.09607, 206:-0.00108, 205: 0.28158, 204:-0.27128, 203:-0.15411, 202: 0.11728, 201: 0.24387,
                200: 0.18051, 199:-0.20685, 198:-0.02205, 197: 0.13229, 196: 0.22004, 195: 0.14643, 194:-0.06058, 193:-0.11559,
                192:-0.18019, 191: 0.35745, 190:-0.29565, 189: 0.18979, 188:-0.11939, 187:-0.26806, 186:-0.06722, 185:-0.45620,
                184:-0.14505, 183: 0.11742, 182: 0.00392, 181: 0.44147, 180:-0.28157, 179:-0.05367, 178:-0.25158, 177: 0.00803,
                176:-0.32224, 175:-0.08313, 174:-0.16214, 173:-0.11599, 172: 0.18053, 171:-0.31758, 170:-0.37970, 169: 0.13194,
                168:-0.33397, 167:-0.20842, 166: 0.05291, 165:-0.01408, 164:-0.30133, 163:-0.18141, 162:-0.16360, 161: 0.17523,
                160: 0.05845, 159: 0.26657, 158:-0.01631, 157: 0.38329, 156: 0.15814, 155:-0.07443, 154:-0.10423, 153:-0.24469,
                152:-0.18059, 151: 0.06716, 150: 0.17718, 149: 0.13133, 148: 0.22000, 147: 0.00323, 146:-0.20053, 145:-0.00306,
                144:-0.12677, 143:-0.06679, 142: 0.18823, 141: 0.23318, 140: 0.02743, 139: 0.28056, 138:-0.18256, 137:-0.08768,
                136:-0.19559, 135: 0.15040, 134:-0.27681, 133:-0.13469, 132: 0.14161, 131:-0.15445, 130:-0.01133, 129: 0.03761,
                128: 0.18891, 127:-0.07532, 126: 0.12125, 125: 0.11565, 124: 0.22430, 123: 0.22008, 122:-0.08070, 121:-0.18887,
                120:-0.13159, 119: 0.26815, 118: 0.14090, 117:-0.07888, 116:-0.11229, 115:-0.15217, 114:-0.06658, 113: 0.15482,
                112:-0.13988, 111: 0.50768, 110: 0.29590, 109: 0.10986, 108: 0.05922, 107: 0.19017, 106: 0.25759, 105: 0.10858,
                104: 0.12594, 103:-0.29317, 102:-0.34908, 101:-0.30159, 100: 0.33526,  99:-0.04471,  98: 0.43860,  97:-0.10559,
                 96: 0.19658,  95:-0.31246,  94: 0.41989,  93:-0.35018,  92:-0.08708,  91: 0.05431,  90:-0.33252,  89:-0.02753,
                 88: 0.04044,  87: 0.26833,  86:-0.34337,  85: 0.29855,  84:-0.32477,  83:-0.02032,  82:-0.06057,  81:-0.11322,
                 80: 0.02794,  79:-0.03900,  78:-0.21132,  77:-0.13260,  76: 0.18201,  75:-0.25694,  74:-0.23131,  73:-0.29685,
                 72: 0.19498,  71: 0.08130,  70:-0.04146,  69:-0.04448,  68: 0.32396,  67: 0.03568,  66: 0.15865,  65:-0.34358,
                 64:-0.19802,  63: 0.46185,  62:-0.18292,  61:-0.38272,  60: 0.01987,  59:-0.11336,  58: 0.11684,  57:-0.26052,
                 56:-0.27765,  55: 0.33572,  54:-0.17565,  53: 1.04222,  52:-0.13083,  51: 0.03533,  50:-0.10124,  49: 0.42525,
                 48: 0.14862,  47: 0.01310,  46: 0.27100,  45:-0.25615,  44: 0.29663,  43: 0.04623,  42:-0.06321,  41:-0.05596,
                 40: 0.14647,  39:-0.04754,  38: 0.27204,  37:-0.08451,  36:-0.26646,  35:-0.27756,  34: 0.10655,  33: 0.11385,
                 32:-0.01991,  31: 0.11086,  30:-0.02755,  29:-0.28642,  28:-0.32812,  27: 0.01069,  26: 0.12208,  25:-0.03904,
                 24: 0.32555,  23:-0.23981,  22: 0.19537,  21:-0.07039,  20:-0.13281,  19:-0.16505,  18:-0.05692,  17:-0.01968,
                 16: 0.06834,  15:-0.22804,  14:-0.15798,  13:-0.00184,  12:-0.15475,  11: 0.31434,  10:-0.19855,   9:-0.26497,
                  8: 0.28706,   7: 0.06602,   6: 0.22902,   5:-0.13532,   4:-0.21723,   3: 0.06886,   2:-0.24007,   1: 0.30851
   606 |      | 605:-2.01494, 604: 4.32118, 603: 1.05604, 602:-2.60890, 601: 1.65098
-------|------|----------------------------------------------------------------------------------------------------------------
predictions <- predict(model,test_set.norm.X)

mlpendtime <- Sys.time()

predictions <- predictions * (maxteStr_h_texture - minteStr_h_texture)
predictions <- round(predictions,0)
mlptable <- table(test_set$Str_h_texture,predictions)
mlprow <-rownames(mlptable)
mlpcol <- colnames(mlptable)
mlpscore <- sumElementinTable(mlptable,mlprow,mlpcol)/sum(mlptable)

mlptakentime <- mlpendtime - mlpstarttime
  cat('The score of MLP is ', mlpscore,'\n')
The score of MLP is  0.02635098 
  cat('It takes ', mlptakentime,'seconds')
It takes  58.56548 seconds

Algorithms that cannot run in a specific time

neural network

We can use neuralnet() to train a NN model. Also, the train() function from caret can help us tune parameters. We can plot the result to see which set of parameters is fit our data the best.

tuning parameter

Model <- train(Str_h_texture ~ .,
               data=train_set,
               method="neuralnet",
               ### Parameters for layers
               tuneGrid = expand.grid(.layer1=c(1:2), .layer2=c(0:2), .layer3=c(0)),
               ### Parameters for optmization
               learningrate = 0.01,
               threshold = 0.01,
               stepmax = 5000
)

in nnclassifier y value should be normalized

train_set.norm <- train_set
maxStr_h_texture <- max(train_set.norm$Str_h_texture)
minStr_h_texture <- min(train_set.norm$Str_h_texture)
train_set.norm$Str_h_texture <- normalize(train_set.norm$Str_h_texture)

nnClassifier <- neuralnet(Str_h_texture ~ .,data=train_set.norm, likelihood = TRUE, 
                          hidden = 1,linear.output = F,act.fct = "tanh")
print(nnClassifier$result.matrix)
plot(nnClassifier)

prediction

output<- compute(nnClassifier,train_set[,-1])
p1 <- output$net.result
p1 <- p1 * (maxStr_h_texture-minStr_h_texture)
p1 <- round(p1,0)
nntable<-  table(train_set$Str_h_texture,p1)

Classification with xgBoost

Xgboost can work perfectly in sparse matrix but it unfortunately cannot run in 5 hours

xgb.train = xgb.DMatrix(data = as.matrix(train_set),label =as.matrix(train_set$Str_h_texture))
xgb.test = xgb.DMatrix(data = as.matrix(test_set),label = as.matrix(test_set$Str_h_texture))
validsoilTexture$Str_h_texture <- as.factor(validsoilTexture$Str_h_texture)
num_class = length(levels(validsoilTexture$Str_h_texture))

params = list(
  booster="gbtree",
  eta=0.001,
  max_depth=5,
  gamma=3,
  subsample=0.75,
  colsample_bytree=1,
  objective="multi:softprob",
  eval_metric="mlogloss",
  num_class=num_class+1
)

# Train the XGBoost classifer
xgb.fit=xgb.train(
  params=params,
  data=xgb.train,
  nrounds=10000,
  nthreads=1,
  early_stopping_rounds=10,
  watchlist=list(val1=xgb.train,val2=xgb.test),
  verbose=0
)

xgb.fit

Algorithms that cannot run successfully

Random Forest* The algorithm cannot run successfully since it will give an Error: cannot allocate vector of size 16.5 Gb random forest is bad for sparse data which can be found in https://stats.stackexchange.com/questions/28828/is-there-a-random-forest-implementation-that-works-well-with-very-sparse-data

RfClassifier = randomForest(Str_h_texture ~ .,data = train_set,proximity = T,mtry = 10)

rfTable <- table(predict(RfClassifier),train_set$Str_h_texture)

print(RfClassifier)
plot(RfClassifier)
---
title: "machine learning documentation in R"
output: html_notebook
---
```{r}
setwd("C:/Users/horat/Desktop/CSIROIntership/soilCode")


library(dplyr)

#create pivot table 
library(reshape)
library(data.table)

#data partition seperate trainset and testset
library (caTools)

library(caret)

#svm library due to limitation of iterations change the library
library(e1071)
library(LiblineaR)

#random forest
library(randomForest)

#ID4 Decision Tree classifier(CART)
library(rpart)
library(rpart.plot)
library(rattle)

#xgboost
library(xgboost)

#for knn classification
library(class)

#install neuralnetwork
library(neuralnet)

#adabag library
library(adabag)

#Stochastic Gradient Descent (SGD) Method Learning Function
library(gradDescent)
library(lightgbm)
#https://www.kaggle.com/c/amazon-employee-access-challenge/discussion/5128#38925

#matrix library
library(Matrix)

#catboost
library(catboost)

#fast naive bayes
library("fastNaiveBayes")

#tidyverse for easy data manipulation and visualization
#caret for easy machine learning workflow

#mlp
library(RSNNS)

library(tidyverse)
library(caret)

featureSoilTable <- read.csv(file = "featureTable.csv",stringsAsFactors=FALSE)
```
# Grouping data in a Pivot Table
```{r}
print(head(featureSoilTable))
```
# create the normalize function
```{r}
normalize <-function(y) {
  
  x<-y[!is.na(y)]
  
  x<-(x - min(x)) / (max(x) - min(x))
  
  y[!is.na(y)]<-x
  
  return(y)
}
```
# preprocessing of the featuring table
```{r}
#change the NULL to na
featureSoilTable['h_texture'][featureSoilTable['h_texture'] == "NULL"] <- NA
#add appendix to colname to avoid mis-understand of the title of dataframe
colnames(featureSoilTable) <- paste("Str",colnames(featureSoilTable),sep = "_")
```
# print out the head of featureSoilTable
```{r}
print(head(featureSoilTable))
```
# remove invalid value and set NA value to 0
```{r}
#extract valid and invalid soil sample
validsoilTexture <- featureSoilTable[!is.na(featureSoilTable$Str_h_texture),]
invalidsoilTexture <- featureSoilTable[is.na(featureSoilTable$Str_h_texture),]

# remove columns that only have nas
validsoilTexture <- validsoilTexture[,colSums(is.na(validsoilTexture))<nrow(validsoilTexture)]
#remove rows have less than 4 data
contribution <- as.data.frame(rowsum(rep(1,times = length(validsoilTexture$Str_h_texture)), validsoilTexture$Str_h_texture),row.names = count)
label <- sort(unique(validsoilTexture$Str_h_texture))
contribution <- cbind(label,contribution)
invaliddata <- contribution[contribution$V1 < 4,]

for (l in invaliddata$label){
  rowlist = which(validsoilTexture$Str_h_texture == l)
  #print(rowlist)
  validsoilTexture <- validsoilTexture[-rowlist,]

}
```
# set x to numeric
```{r}
validsoilTexture$Str_h_texture <- as.numeric(as.factor(validsoilTexture$Str_h_texture))
validsoilTexture[,-1] <- apply(apply(validsoilTexture[,-1], 2, as.factor), 2, as.numeric)
validsoilTexture[,-1]<- (apply(validsoilTexture[,-1],2,normalize))
validsoilTexture <- as.data.frame(validsoilTexture)
#change null value to 0
validsoilTexture[is.na(validsoilTexture)] = 0

ncol <- ncol(validsoilTexture)
```
# print out the head of validsoilTexture
```{r}
print(head(validsoilTexture))
```
# set random seed
```{r}
set.seed(122)
```
# give the valid sample
```{r}
split = sample.split(validsoilTexture$Str_h_texture,SplitRatio = 0.7)

train_set = subset(validsoilTexture, split == TRUE)
test_set = subset(validsoilTexture, split == FALSE)

train_set$Str_h_texture = as.numeric(train_set$Str_h_texture)
test_set$Str_h_texture = as.numeric(test_set$Str_h_texture)
```

```{r}
summary(train_set)
```

```{r}
# Find the best model with the best cost parameter via 10-fold cross-validations

# the tunning part of svm, which will take lots of time to run

tryTypes=c(0:7)
tryCosts=c(1000,1,0.001)
bestCost=NA
bestAcc=0.6290723
bestType=NA

for(ty in tryTypes){

   for(co in tryCosts){
    acc=LiblineaR(data=train_set[,-1],target=train_set[,c("Str_h_texture")],type=7,cost=co,bias=1,verbose=FALSE)
    cat("Results for C=",co," : ",acc," accuracy.\n",sep="")
    if(acc>bestAcc){
    bestCost=co
    bestAcc=acc
    bestType=ty
    }
  }

}

```
# svm classifier
LIBLINEAR is a linear classifier for data with millions of instances and features. It supports L2-regularized classifiers, L2-loss linear SVM, L1-loss linear SVM, and logistic regression (LR).LiblineaR allows the estimation of predictive linear models for classification and regression, such as L1- or L2-regularized logistic regression, L1- or L2-regularized L2-loss support vector classification, L2-regularized L1-loss support vector classification and multi-class support vector classification. It also supports L2-regularized support vector regression (with L1- or L2-loss). The estimation of the models is particularly fast as compared to other libraries. 
```{r}
svmStarttime <- Sys.time()
svmClassifier <- LiblineaR(data = train_set[,-1],target = train_set[,c("Str_h_texture")],bias=1,cost = 1000)
svmPredictTrain <- predict(svmClassifier,train_set[,-1],proba=TRUE,decisionValues=TRUE)
svmPredictTrainTable <- table(svmPredictTrain$predictions,train_set[,c("Str_h_texture")])
svmEndtime <- Sys.time()
svmTimeTaken <- svmEndtime - svmStarttime
svmPredictTest <- predict(svmClassifier,test_set[,-1],proba=TRUE,decisionValues=TRUE)
svmPredictTestTable <- table(svmPredictTest$predictions,test_set[,c("Str_h_texture")])
```
# function for calculating the score of matirx
```{r}
sumElementinTable <- function(a,c,r){
  sum = 0
  for (i in c){
    if (i %in% r){
      sum = sum + a[i,i]
    }
  }
  return(sum)
}

```
# calculating the score of svmClassifier
```{r}

svmTestcol <- colnames(svmPredictTestTable)
svmTestrow <- rownames(svmPredictTestTable)

svmTraincol <- colnames(svmPredictTrainTable)
svmTrainrow <- rownames(svmPredictTrainTable)


svmPredictTestScore <- sumElementinTable(svmPredictTestTable,svmTestcol,svmTestrow)/sum(svmPredictTestTable)
svmPredictTrainScore <- sumElementinTable(svmPredictTrainTable,svmTraincol,svmTrainrow)/sum(svmPredictTrainTable)

```

```{r}
# the time of svm is:
cat("the running time of svm is",svmTimeTaken, "seconds")
```
```{r}
#the score of svm is

cat("The train score of svm algorithm is ",svmPredictTrainScore,'\n')

cat("The test score of svm algorithm is ",svmPredictTestScore)

```

# classification is CART model
```{r}
cartFit <- rpart(Str_h_texture ~ .,data = train_set,control = rpart.control(cp = 0.0001))

#get cp value
printcp(cartFit)
```

choose the CP with lowest xerror

```{r}
cartstartTime <- Sys.time()

fit.pruned = prune(cartFit, cp = 0.00021967)

cartPrediction <- predict(fit.pruned, test_set, type = "vector")

cartendTime <- Sys.time()

cartTimeTaken <- cartendTime - cartstartTime

data.frame(test_set,cartPrediction)

cartPrediction = round(cartPrediction,0)
cartTable <- table(test_set$Str_h_texture,cartPrediction)

cartTable
```

calculate the score of cart model
```{r}
cartrow <- rownames(cartTable)
cartcol <- colnames(cartTable)
cartscore <- sumElementinTable(cartTable,cartrow,cartcol)/sum(cartTable)

```

the time of cart model
```{r}
cat("the time of cart",cartTimeTaken , "seconds")
```
the score of cart model

```{r}
cat('the score of cart model',cartscore)

```



# lightgbm

separate x and y from train_set and test_set 
```{r}

train_set.num_X <- select (train_set,-c(Str_h_texture))
test_set.num_X <- select (test_set,-c(Str_h_texture))

```

start lightgbm machine learning algorithms
```{r}
lstarttime <- Sys.time()
ltrain = lgb.Dataset(data = as.matrix(train_set.num_X),label = train_set$Str_h_texture, free_raw_data = FALSE)
params <- list(objective="regression", metric="l2")
model <- lgb.cv(params, 
                ltrain , 
                10, 
                nfold=5, 
                min_data=1, 
                learning_rate=1, 
                early_stopping_rounds=10,
                Depth = 8,
                lambda_l1 = 10,
                lambda_l2 = 10
)
lstoptime <- Sys.time()
```

# tunning parameters

num_leaves: This is the main parameter to control the complexity of the tree model. Theoretically, we can set num_leaves = 2^(max_depth) to obtain the same number of leaves as depth-wise tree. However, this simple conversion is not good in practice. The reason is that a leaf-wise tree is typically much deeper than a depth-wise tree for a fixed number of leaves. Unconstrained depth can induce over-fitting. Thus, when trying to tune the num_leaves, we should let it be smaller than 2^(max_depth). For example, when the max_depth=7 the depth-wise tree can get good accuracy, but setting num_leaves to 127 may cause over-fitting, and setting it to 70 or 80 may get better accuracy than depth-wise.

min_data_in_leaf: This is a very important parameter to prevent over-fitting in a leaf-wise tree. Its optimal value depends on the number of training samples and num_leaves. Setting it to a large value can avoid growing too deep a tree, but may cause under-fitting. In practice, setting it to hundreds or thousands is enough for a large dataset.

max_depth: You also can use max_depth to limit the tree depth explicitly.

```{r}
ltest = lgb.Dataset.create.valid(ltrain , as.matrix(test_set.num_X), label = test_set$Str_h_texture)
valids <- list(test = ltest)

grid_search <- expand.grid(Depth = 7:8,
                           L1 = 8:12,
                           L2 = 8:12)

model <- list()
perf <- numeric(nrow(grid_search))

for (i in 1:nrow(grid_search)) {
  model[[i]] <- lgb.train(list(objective = "regression",
                               metric = "l2",
                               lambda_l1 = grid_search[i, "L1"],
                               lambda_l2 = grid_search[i, "L2"],
                               max_depth = grid_search[i, "Depth"]),
                          ltrain,
                          2,
                          valids,
                          min_data = 1,
                          learning_rate = 1,
                          early_stopping_rounds = 5,
                          num_leaves = 2,
                          num_iterations = 100,
                          min_gain_to_split = 500,)
  
  perf[i] <- min(rbindlist(model[[i]]$record_evals$test$l2))
}

cat("Model ", which.min(perf), " is lowest loss: ", min(perf), sep = "")

print(grid_search[which.min(perf), ])
```

Algorithms score is around 0.3 and computational time is:
```{r}
lgbtaketime <- lstoptime - lstarttime
cat("The algorithms takes ", lgbtaketime, "seconds")

```

# catboost
```{r}

catstartTime <- Sys.time()

fit_params <- list(l2_leaf_reg = 0.001,
                   depth=6,
                   learning_rate = 0.1,
                   iterations = 100,
                   random_seed = 233)


pool = catboost.load_pool(as.matrix(train_set.num_X), label = as.integer(train_set[,1]))

model <- catboost.train(pool, params = fit_params)

catstopTime <- Sys.time()

cattakenTime <- catstopTime - catstartTime
```

calculate the prediction:
```{r}
#get the prediction
catprediction <- catboost.predict(model, 
                                  pool, 
                                  prediction_type = 'RawFormulaVal')
```

calculate the program score:
```{r}
#round the prediction
catprediction <- round(catprediction,0)

catTable <- table(train_set$Str_h_texture,catprediction)

catTablerow <- rownames(catTable)
catTablecol <- colnames(catTable)
catscore <- sumElementinTable(catTable,catTablerow,catTablecol)/sum(catTable)

```

```{r}
cat('The algorithm takes' ,cattakenTime , 'seconds')
```


```{r}
cat('The algorithm scores' ,catscore)
```
## naivebayes classification*
```{r}

nbstarttime <- Sys.time()
  
nbClassifier <- naiveBayes(as.factor(Str_h_texture) ~ .,data = train_set,laplace=2)
nbTestPrediction <- predict(nbClassifier,test_set,type = "class")
nbTableTest <- table(nbTestPrediction,test_set$Str_h_texture)

nbTestTablerow <- rownames(nbTableTest)
nbTestTablecol <- colnames(nbTableTest)
nbTestTablescore<- sumElementinTable(nbTableTest,nbTestTablerow,nbTestTablecol)/sum(nbTableTest)

nbendtime <- Sys.time()

nbTrainPrediction <- predict(nbClassifier,train_set,type = "class")
nbTrainTable <- table(nbTrainPrediction,train_set$Str_h_texture)

nbTrainTablerow <- rownames(nbTrainTable)
nbTrainTablecol <- colnames(nbTrainTable)
nbTrainTablescore <- sumElementinTable(nbTrainTable,nbTrainTablerow,nbTrainTablecol)/sum(nbTrainTable)

nbtakentime <- nbendtime - nbstarttime

```
# nbalgorithm 
```{r}
cat('NaiveBayes takes',nbtakentime,'seconds')

```

# nbscore
```{r}
cat('NaiveBayes score',nbTrainTablescore)

```


# fastNaiveBayes algorithms by gaussian
```{r}
fnbstartTime <- Sys.time()
dist <- fnb.detect_distribution(train_set.num_X)
gauss <- fnb.gaussian(train_set.num_X[,dist$gaussian], as.factor(train_set$Str_h_texture),sparse = TRUE,check = FALSE)
pred <- predict(gauss, train_set.num_X[,dist$gaussian])
fnbendTime <- Sys.time()
error <- mean(as.factor(train_set$Str_h_texture)!=pred)
print(error)
fnbtakentime <- fnbendTime - fnbstartTime
```

```{r}

cat("fastNaiveBayes takes ", round(fnbtakentime,6), "seconds")
```

# MLP algorithm (a subsitute algorithm for nn classifier)
Data preprocessing
```{r}
train_set.norm <- train_set
maxStr_h_texture <- max(train_set.norm$Str_h_texture)
minStr_h_texture <- min(train_set.norm$Str_h_texture)
train_set.norm$Str_h_texture <- normalize(train_set.norm$Str_h_texture)
train_set.norm.X <- train_set.norm[,-1]

test_set.norm <- test_set
maxteStr_h_texture <- max(test_set.norm$Str_h_texture)
minteStr_h_texture <- min(test_set.norm$Str_h_texture)
test_set.norm$Str_h_texture <- normalize(test_set.norm$Str_h_texture)
test_set.norm.X <- test_set.norm[,-1]

```

```{r}
mlpstarttime <- Sys.time()

model <- mlp(train_set.norm.X, train_set.norm$Str_h_texture, size=5, learnFuncParams=c(0.1), 
             maxit=50, inputsTest=test_set.norm.X, targetsTest=test_set.norm$Str_h_texture)

summary(model)

predictions <- predict(model,test_set.norm.X)

mlpendtime <- Sys.time()

predictions <- predictions * (maxteStr_h_texture - minteStr_h_texture)
predictions <- round(predictions,0)
mlptable <- table(test_set$Str_h_texture,predictions)
mlprow <-rownames(mlptable)
mlpcol <- colnames(mlptable)
mlpscore <- sumElementinTable(mlptable,mlprow,mlpcol)/sum(mlptable)

mlptakentime <- mlpendtime - mlpstarttime
```

```{r}
  cat('The score of MLP is ', mlpscore,'\n')
  cat('It takes ', mlptakentime,'seconds')

```

# Algorithms that cannot run in a specific time

# neural network

We can use neuralnet() to train a NN model. Also, the train() function from caret can help us tune parameters.
We can plot the result to see which set of parameters is fit our data the best.

tuning parameter
```{r}
Model <- train(Str_h_texture ~ .,
               data=train_set,
               method="neuralnet",
               ### Parameters for layers
               tuneGrid = expand.grid(.layer1=c(1:2), .layer2=c(0:2), .layer3=c(0)),
               ### Parameters for optmization
               learningrate = 0.01,
               threshold = 0.01,
               stepmax = 5000
)
```

in nnclassifier y value should be normalized
```{r}
train_set.norm <- train_set
maxStr_h_texture <- max(train_set.norm$Str_h_texture)
minStr_h_texture <- min(train_set.norm$Str_h_texture)
train_set.norm$Str_h_texture <- normalize(train_set.norm$Str_h_texture)

nnClassifier <- neuralnet(Str_h_texture ~ .,data=train_set.norm, likelihood = TRUE, 
                          hidden = 1,linear.output = F,act.fct = "tanh")
print(nnClassifier$result.matrix)
plot(nnClassifier)
```

prediction
```{r}
output<- compute(nnClassifier,train_set[,-1])
p1 <- output$net.result
p1 <- p1 * (maxStr_h_texture-minStr_h_texture)
p1 <- round(p1,0)
nntable<-  table(train_set$Str_h_texture,p1)

```

# Classification with xgBoost
Xgboost can work perfectly in sparse matrix but it unfortunately cannot run in 5 hours

```{r}
xgb.train = xgb.DMatrix(data = as.matrix(train_set),label =as.matrix(train_set$Str_h_texture))
xgb.test = xgb.DMatrix(data = as.matrix(test_set),label = as.matrix(test_set$Str_h_texture))
validsoilTexture$Str_h_texture <- as.factor(validsoilTexture$Str_h_texture)
num_class = length(levels(validsoilTexture$Str_h_texture))

params = list(
  booster="gbtree",
  eta=0.001,
  max_depth=5,
  gamma=3,
  subsample=0.75,
  colsample_bytree=1,
  objective="multi:softprob",
  eval_metric="mlogloss",
  num_class=num_class+1
)

# Train the XGBoost classifer
xgb.fit=xgb.train(
  params=params,
  data=xgb.train,
  nrounds=10000,
  nthreads=1,
  early_stopping_rounds=10,
  watchlist=list(val1=xgb.train,val2=xgb.test),
  verbose=0
)

xgb.fit

```

# Algorithms that cannot run successfully 
Random Forest* The algorithm cannot run successfully since it will give an Error: cannot allocate vector of size 16.5 Gb
random forest is bad for sparse data which can be found in https://stats.stackexchange.com/questions/28828/is-there-a-random-forest-implementation-that-works-well-with-very-sparse-data
```{r}
RfClassifier = randomForest(Str_h_texture ~ .,data = train_set,proximity = T,mtry = 10)

rfTable <- table(predict(RfClassifier),train_set$Str_h_texture)

print(RfClassifier)
plot(RfClassifier)
```



